You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/06/22 10:09:31 UTC
Build failed in Jenkins: carbondata-master-spark-2.2 » Apache CarbonData :: Examples #1792
See <https://builds.apache.org/job/carbondata-master-spark-2.2/org.apache.carbondata$carbondata-examples/1792/display/redirect>
------------------------------------------
[...truncated 3.02 MB...]
at org.scalatest.Suite$class.run(Suite.scala:1421)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
at org.scalatest.tools.Runner$.main(Runner.scala:860)
at org.scalatest.tools.Runner.main(Runner.scala)
2019-06-22 10:07:56 ERROR CarbonLoadDataCommand:391 - java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure
2019-06-22 10:07:56 ERROR CarbonLoadDataCommand:166 - Got exception java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure when processing data. But this command does not support undo yet, skipping the undo part.
2019-06-22 10:07:56 AUDIT audit:93 - {"time":"June 22, 2019 3:07:56 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885822264966046","opStatus":"FAILED","opTime":"1546 ms","table":"default.timeSeriesTable","extraInfo":{"Exception":"java.lang.RuntimeException","Message":"Data Load failed for DataMap. Please check logs for the failure"}}
- TimeSeriesPreAggregateTableExample *** FAILED ***
 java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure
 at org.apache.spark.sql.execution.command.preaaggregate.LoadPostAggregateListener$.onEvent(PreAggregateListeners.scala:557)
 at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
 at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:545)
 at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:628)
 at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:357)
 at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
 at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
 at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
 at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
 at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
 ...
2019-06-22 10:07:56 AUDIT audit:72 - {"time":"June 22, 2019 3:07:56 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885823873644049","opStatus":"START"}
2019-06-22 10:07:57 AUDIT audit:93 - {"time":"June 22, 2019 3:07:57 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885823873644049","opStatus":"SUCCESS","opTime":"447 ms","table":"default.persontable","extraInfo":{"bad_record_path":"","streaming":"false","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-06-22 10:07:57 AUDIT audit:72 - {"time":"June 22, 2019 3:07:57 AM PDT","username":"jenkins","opName":"LOAD DATA OVERWRITE","opId":"20885824322967073","opStatus":"START"}
2019-06-22 10:07:58 AUDIT audit:93 - {"time":"June 22, 2019 3:07:58 AM PDT","username":"jenkins","opName":"LOAD DATA OVERWRITE","opId":"20885824322967073","opStatus":"SUCCESS","opTime":"903 ms","table":"default.personTable","extraInfo":{"SegmentId":"0","DataSize":"772.47KB","IndexSize":"720.0B"}}
2019-06-22 10:07:58 AUDIT audit:72 - {"time":"June 22, 2019 3:07:58 AM PDT","username":"jenkins","opName":"CREATE DATAMAP","opId":"20885825277153343","opStatus":"START"}
2019-06-22 10:08:00 AUDIT audit:93 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"CREATE DATAMAP","opId":"20885825277153343","opStatus":"SUCCESS","opTime":"1859 ms","table":"default.persontable","extraInfo":{"provider":"lucene","dmName":"dm","index_columns":"id , name"}}
2019-06-22 10:08:00 ERROR DiskBlockObjectWriter:91 - Uncaught exception while reverting partial writes to file /tmp/blockmgr-475c8333-03ad-4838-9585-a519ec2a3525/35/temp_shuffle_606460e4-4bbd-46a1-a1ac-683f98b50035
java.io.FileNotFoundException: /tmp/blockmgr-475c8333-03ad-4838-9585-a519ec2a3525/35/temp_shuffle_606460e4-4bbd-46a1-a1ac-683f98b50035 (No such file or directory)
at java.io.FileOutputStream.open0(Native Method)
at java.io.FileOutputStream.open(FileOutputStream.java:270)
at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
at org.apache.spark.storage.DiskBlockObjectWriter$$anonfun$revertPartialWritesAndClose$2.apply$mcV$sp(DiskBlockObjectWriter.scala:217)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1346)
at org.apache.spark.storage.DiskBlockObjectWriter.revertPartialWritesAndClose(DiskBlockObjectWriter.scala:214)
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.stop(BypassMergeSortShuffleWriter.java:237)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:102)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
at org.apache.spark.scheduler.Task.run(Task.scala:108)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2019-06-22 10:08:00 ERROR BypassMergeSortShuffleWriter:239 - Error while deleting file /tmp/blockmgr-475c8333-03ad-4838-9585-a519ec2a3525/35/temp_shuffle_606460e4-4bbd-46a1-a1ac-683f98b50035
2019-06-22 10:08:00 ERROR Executor:91 - Exception in task 0.0 in stage 263.0 (TID 688)
java.io.FileNotFoundException: /tmp/blockmgr-475c8333-03ad-4838-9585-a519ec2a3525/35/temp_shuffle_606460e4-4bbd-46a1-a1ac-683f98b50035 (No such file or directory)
at java.io.FileOutputStream.open0(Native Method)
at java.io.FileOutputStream.open(FileOutputStream.java:270)
at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:103)
at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:116)
at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:237)
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
at org.apache.spark.scheduler.Task.run(Task.scala:108)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2019-06-22 10:08:00 ERROR TaskSetManager:70 - Task 0 in stage 263.0 failed 1 times; aborting job
- LuceneDataMapExample *** FAILED ***
 org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 263.0 failed 1 times, most recent failure: Lost task 0.0 in stage 263.0 (TID 688, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-475c8333-03ad-4838-9585-a519ec2a3525/35/temp_shuffle_606460e4-4bbd-46a1-a1ac-683f98b50035 (No such file or directory)
 at java.io.FileOutputStream.open0(Native Method)
 at java.io.FileOutputStream.open(FileOutputStream.java:270)
 at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
 at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:103)
 at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:116)
 at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:237)
 at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
 at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
 at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
 at org.apache.spark.scheduler.Task.run(Task.scala:108)
 at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
 at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
 at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
 at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
 at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
 at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
 at scala.Option.foreach(Option.scala:257)
 at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
 ...
 Cause: java.io.FileNotFoundException: /tmp/blockmgr-475c8333-03ad-4838-9585-a519ec2a3525/35/temp_shuffle_606460e4-4bbd-46a1-a1ac-683f98b50035 (No such file or directory)
 at java.io.FileOutputStream.open0(Native Method)
 at java.io.FileOutputStream.open(FileOutputStream.java:270)
 at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
 at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:103)
 at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:116)
 at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:237)
 at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
 at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
 at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
 at org.apache.spark.scheduler.Task.run(Task.scala:108)
 ...
2019-06-22 10:08:00 AUDIT audit:72 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885827412806072","opStatus":"START"}
2019-06-22 10:08:00 AUDIT audit:93 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885827412806072","opStatus":"SUCCESS","opTime":"76 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-06-22 10:08:00 AUDIT audit:72 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885827494148912","opStatus":"START"}
2019-06-22 10:08:00 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-06-22 10:08:00 AUDIT audit:93 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885827494148912","opStatus":"SUCCESS","opTime":"151 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-06-22 10:08:00 AUDIT audit:72 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885827649151538","opStatus":"START"}
2019-06-22 10:08:00 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-06-22 10:08:00 AUDIT audit:93 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885827649151538","opStatus":"SUCCESS","opTime":"154 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-06-22 10:08:00 AUDIT audit:72 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885827808851427","opStatus":"START"}
2019-06-22 10:08:00 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-06-22 10:08:00 AUDIT audit:93 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885827808851427","opStatus":"SUCCESS","opTime":"196 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-06-22 10:08:00 AUDIT audit:72 - {"time":"June 22, 2019 3:08:00 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885828009817667","opStatus":"START"}
2019-06-22 10:08:01 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-06-22 10:08:01 AUDIT audit:93 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885828009817667","opStatus":"SUCCESS","opTime":"184 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.43KB"}}
+--------+
|count(1)|
+--------+
| 40|
+--------+
2019-06-22 10:08:01 AUDIT audit:72 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885828281250382","opStatus":"START"}
2019-06-22 10:08:01 AUDIT audit:93 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885828281250382","opStatus":"SUCCESS","opTime":"46 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
| 40|
+--------+
2019-06-22 10:08:01 AUDIT audit:72 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885828396094719","opStatus":"START"}
2019-06-22 10:08:01 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-06-22 10:08:01 AUDIT audit:93 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885828396094719","opStatus":"SUCCESS","opTime":"174 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-06-22 10:08:01 AUDIT audit:72 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885828574662045","opStatus":"START"}
2019-06-22 10:08:01 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-06-22 10:08:01 AUDIT audit:93 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885828574662045","opStatus":"SUCCESS","opTime":"185 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.43KB"}}
+--------+
|count(1)|
+--------+
| 60|
+--------+
2019-06-22 10:08:01 AUDIT audit:72 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"20885828845493638","opStatus":"START"}
2019-06-22 10:08:01 AUDIT audit:93 - {"time":"June 22, 2019 3:08:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"20885828845493638","opStatus":"SUCCESS","opTime":"125 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
Data:
0 robot0 0 0 9223372036854775807 0.0 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
1 robot1 1 1 9223372036854775806 0.5 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
2 robot2 2 2 9223372036854775805 1.0 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
3 robot3 3 3 9223372036854775804 1.5 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
4 robot4 4 4 9223372036854775803 2.0 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
5 robot5 5 5 9223372036854775802 2.5 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
6 robot6 6 6 9223372036854775801 3.0 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
7 robot7 7 7 9223372036854775800 3.5 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
8 robot8 8 8 9223372036854775799 4.0 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
9 robot9 9 9 9223372036854775798 4.5 true 2019-03-01 2019-02-12 03:03:34.0 12.35 varchar
Hello World From Carbon
Data:
0 robot0 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 0 0 9223372036854775807 0.0 true 12.35
1 robot1 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 1 1 9223372036854775806 0.5 true 12.35
2 robot2 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 2 2 9223372036854775805 1.0 true 12.35
3 robot3 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 3 3 9223372036854775804 1.5 true 12.35
4 robot4 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 4 4 9223372036854775803 2.0 true 12.35
5 robot5 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 5 5 9223372036854775802 2.5 true 12.35
6 robot6 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 6 6 9223372036854775801 3.0 true 12.35
7 robot7 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 7 7 9223372036854775800 3.5 true 12.35
8 robot8 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 8 8 9223372036854775799 4.0 true 12.35
9 robot9 2019-03-01 2019-02-12 03:03:34.0 varchar Hello World From Carbon 9 9 9223372036854775798 4.5 true 12.35
- CarbonReaderExample
2019-06-22 10:08:02 AUDIT audit:72 - {"time":"June 22, 2019 3:08:02 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885829269463749","opStatus":"START"}
2019-06-22 10:08:02 AUDIT audit:93 - {"time":"June 22, 2019 3:08:02 AM PDT","username":"jenkins","opName":"CREATE TABLE","opId":"20885829269463749","opStatus":"SUCCESS","opTime":"88 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-06-22 10:08:02 AUDIT audit:72 - {"time":"June 22, 2019 3:08:02 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885829369886293","opStatus":"START"}
2019-06-22 10:08:02 AUDIT audit:93 - {"time":"June 22, 2019 3:08:02 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885829369886293","opStatus":"SUCCESS","opTime":"173 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"924.0B","IndexSize":"551.0B"}}
2019-06-22 10:08:02 AUDIT audit:72 - {"time":"June 22, 2019 3:08:02 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885829556068737","opStatus":"START"}
2019-06-22 10:08:02 AUDIT audit:93 - {"time":"June 22, 2019 3:08:02 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"20885829556068737","opStatus":"SUCCESS","opTime":"175 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"1","DataSize":"924.0B","IndexSize":"551.0B"}}
+---+---------+--------+
| id| name| salary|
+---+---------+--------+
| 1| 'liang'|200000.0|
| 2|'anubhav'| 20000.0|
| 1| 'liang'|200000.0|
| 2|'anubhav'| 20000.0|
+---+---------+--------+
OK
+---++-------++--------------+
| ID|| NAME || SALARY |
+---++-------++--------------+
| 1 || 'liang' || 200000.0 |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0 |
+---++-------++--------------+
| 1 || 'liang' || 200000.0 |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0 |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 4
OK
+--------------+
| NAME |
+---++---------+
| 'liang' |
+---++---------+
| 'anubhav' |
+---++---------+
| 'liang' |
+---++---------+
| 'anubhav' |
+---++---------+
********** Total Rows Fetched When Quering The Individual Columns **********4
OK
+---++-------++--------------+
| Salary|| ID || NAME |
+---++-------++--------------+
| 200000.0 || 1 || 'liang' |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav' |
+---++-------++--------------+
| 200000.0 || 1 || 'liang' |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav' |
+---++-------++--------------+
********** Total Rows Fetched When Quering The Out Of Order Columns **********4
- HiveExample
Run completed in 1 minute, 55 seconds.
Total number of tests run: 20
Suites: completed 2, aborted 0
Tests: succeeded 15, failed 5, canceled 0, ignored 0, pending 0
*** 5 TESTS FAILED ***
[JENKINS] Recording test results
Jenkins build is back to normal : carbondata-master-spark-2.2 » Apache CarbonData :: Examples #1793
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.2/org.apache.carbondata$carbondata-examples/1793/display/redirect>