Posted to commits@carbondata.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/02/13 09:27:08 UTC

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3384

See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3384/display/redirect>

------------------------------------------
[...truncated 563.07 KB...]
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
2019-02-13 09:25:36 ERROR CarbonLoadDataCommand:391 - java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure
2019-02-13 09:25:36 ERROR CarbonLoadDataCommand:166 - Got exception java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure when processing data. But this command does not support undo yet, skipping the undo part.
2019-02-13 09:25:36 AUDIT audit:93 - {"time":"February 13, 2019 1:25:36 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685962672955446","opStatus":"FAILED","opTime":"1161 ms","table":"default.timeSeriesTable","extraInfo":{"Exception":"java.lang.RuntimeException","Message":"Data Load failed for DataMap. Please check logs for the failure"}}
- TimeSeriesPreAggregateTableExample *** FAILED ***
  java.lang.RuntimeException: Data Load failed for DataMap. Please check logs for the failure
  at org.apache.spark.sql.execution.command.preaaggregate.LoadPostAggregateListener$.onEvent(PreAggregateListeners.scala:557)
  at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
  at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:531)
  at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:630)
  at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:357)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
  at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
  ...
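The failing TimeSeriesPreAggregateTableExample exercises CarbonData's timeseries pre-aggregate datamap: the LOAD DATA on default.timeSeriesTable fails while propagating the load into the datamap's rollup table. A minimal sketch of the statements such an example runs, assuming a SparkSession named spark, an event-time column named mytime, and hour granularity (the names and granularity here are illustrative, not taken from this build):

    // Sketch only: a timeseries pre-aggregate datamap on a carbon table.
    spark.sql(
      """CREATE TABLE timeSeriesTable (mytime TIMESTAMP, name STRING, age INT)
        |STORED AS carbondata""".stripMargin)

    // The rollup table behind the datamap is refreshed on every load;
    // that refresh is the step that failed in the run above.
    spark.sql(
      """CREATE DATAMAP agg_hour ON TABLE timeSeriesTable
        |USING 'timeseries'
        |DMPROPERTIES ('EVENT_TIME'='mytime', 'HOUR_GRANULARITY'='1')
        |AS SELECT mytime, SUM(age) FROM timeSeriesTable GROUP BY mytime""".stripMargin)

    spark.sql("LOAD DATA LOCAL INPATH '/path/to/data.csv' INTO TABLE timeSeriesTable")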
2019-02-13 09:25:36 AUDIT audit:72 - {"time":"February 13, 2019 1:25:36 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685963894927023","opStatus":"START"}
2019-02-13 09:25:36 AUDIT audit:93 - {"time":"February 13, 2019 1:25:36 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685963894927023","opStatus":"SUCCESS","opTime":"151 ms","table":"default.persontable","extraInfo":{"bad_record_path":"","streaming":"false","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-02-13 09:25:36 AUDIT audit:72 - {"time":"February 13, 2019 1:25:36 AM PST","username":"jenkins","opName":"LOAD DATA OVERWRITE","opId":"6685964050291944","opStatus":"START"}
2019-02-13 09:25:37 AUDIT audit:93 - {"time":"February 13, 2019 1:25:37 AM PST","username":"jenkins","opName":"LOAD DATA OVERWRITE","opId":"6685964050291944","opStatus":"SUCCESS","opTime":"886 ms","table":"default.personTable","extraInfo":{"SegmentId":"0","DataSize":"771.08KB","IndexSize":"658.0B"}}
2019-02-13 09:25:37 AUDIT audit:72 - {"time":"February 13, 2019 1:25:37 AM PST","username":"jenkins","opName":"CREATE DATAMAP","opId":"6685964942226008","opStatus":"START"}
2019-02-13 09:25:39 AUDIT audit:93 - {"time":"February 13, 2019 1:25:39 AM PST","username":"jenkins","opName":"CREATE DATAMAP","opId":"6685964942226008","opStatus":"SUCCESS","opTime":"1895 ms","table":"default.persontable","extraInfo":{"provider":"lucene","dmName":"dm","index_columns":"id , name"}}
2019-02-13 09:25:39 ERROR DiskBlockObjectWriter:91 - Uncaught exception while reverting partial writes to file /tmp/blockmgr-403aba24-df9b-49c6-957c-2aeab661375b/34/temp_shuffle_8ebcab66-d801-42d5-a591-3b92c801cbab
java.io.FileNotFoundException: /tmp/blockmgr-403aba24-df9b-49c6-957c-2aeab661375b/34/temp_shuffle_8ebcab66-d801-42d5-a591-3b92c801cbab (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.revertPartialWritesAndClose(DiskBlockObjectWriter.scala:210)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.stop(BypassMergeSortShuffleWriter.java:238)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:102)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2019-02-13 09:25:39 ERROR BypassMergeSortShuffleWriter:240 - Error while deleting file /tmp/blockmgr-403aba24-df9b-49c6-957c-2aeab661375b/34/temp_shuffle_8ebcab66-d801-42d5-a591-3b92c801cbab
2019-02-13 09:25:39 ERROR Executor:91 - Exception in task 0.0 in stage 366.0 (TID 2368)
java.io.FileNotFoundException: /tmp/blockmgr-403aba24-df9b-49c6-957c-2aeab661375b/34/temp_shuffle_8ebcab66-d801-42d5-a591-3b92c801cbab (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
	at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2019-02-13 09:25:39 ERROR TaskSetManager:70 - Task 0 in stage 366.0 failed 1 times; aborting job
- LuceneDataMapExample *** FAILED ***
  org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 366.0 failed 1 times, most recent failure: Lost task 0.0 in stage 366.0 (TID 2368, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-403aba24-df9b-49c6-957c-2aeab661375b/34/temp_shuffle_8ebcab66-d801-42d5-a591-3b92c801cbab (No such file or directory)
	at java.io.FileOutputStream.open0(Native Method)
	at java.io.FileOutputStream.open(FileOutputStream.java:270)
	at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
	at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
	at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
  ...
  Cause: java.io.FileNotFoundException: /tmp/blockmgr-403aba24-df9b-49c6-957c-2aeab661375b/34/temp_shuffle_8ebcab66-d801-42d5-a591-3b92c801cbab (No such file or directory)
  at java.io.FileOutputStream.open0(Native Method)
  at java.io.FileOutputStream.open(FileOutputStream.java:270)
  at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
  at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
  at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
  at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
  at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
  at org.apache.spark.scheduler.Task.run(Task.scala:99)
  ...
2019-02-13 09:25:39 AUDIT audit:72 - {"time":"February 13, 2019 1:25:39 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685967032794602","opStatus":"START"}
2019-02-13 09:25:40 AUDIT audit:93 - {"time":"February 13, 2019 1:25:40 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685967032794602","opStatus":"SUCCESS","opTime":"927 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-02-13 09:25:40 AUDIT audit:72 - {"time":"February 13, 2019 1:25:40 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685967967475001","opStatus":"START"}
2019-02-13 09:25:40 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-02-13 09:25:41 AUDIT audit:93 - {"time":"February 13, 2019 1:25:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685967967475001","opStatus":"SUCCESS","opTime":"941 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-02-13 09:25:41 AUDIT audit:72 - {"time":"February 13, 2019 1:25:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685968918037192","opStatus":"START"}
2019-02-13 09:25:41 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-02-13 09:25:41 AUDIT audit:93 - {"time":"February 13, 2019 1:25:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685968918037192","opStatus":"SUCCESS","opTime":"341 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-02-13 09:25:41 AUDIT audit:72 - {"time":"February 13, 2019 1:25:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685969267345971","opStatus":"START"}
2019-02-13 09:25:41 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-02-13 09:25:41 AUDIT audit:93 - {"time":"February 13, 2019 1:25:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685969267345971","opStatus":"SUCCESS","opTime":"184 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-02-13 09:25:41 AUDIT audit:72 - {"time":"February 13, 2019 1:25:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685969462767916","opStatus":"START"}
2019-02-13 09:25:42 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-02-13 09:25:42 AUDIT audit:93 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685969462767916","opStatus":"SUCCESS","opTime":"192 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-02-13 09:25:42 AUDIT audit:72 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685969753584895","opStatus":"START"}
2019-02-13 09:25:42 AUDIT audit:93 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685969753584895","opStatus":"SUCCESS","opTime":"76 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-02-13 09:25:42 AUDIT audit:72 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685969889706394","opStatus":"START"}
2019-02-13 09:25:42 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-02-13 09:25:42 AUDIT audit:93 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685969889706394","opStatus":"SUCCESS","opTime":"164 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-02-13 09:25:42 AUDIT audit:72 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685970059638965","opStatus":"START"}
2019-02-13 09:25:42 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-02-13 09:25:42 AUDIT audit:93 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685970059638965","opStatus":"SUCCESS","opTime":"165 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-02-13 09:25:42 AUDIT audit:72 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6685970301739426","opStatus":"START"}
2019-02-13 09:25:42 AUDIT audit:93 - {"time":"February 13, 2019 1:25:42 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6685970301739426","opStatus":"SUCCESS","opTime":"84 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
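The ExternalTableExample audit trail reads straight through: each LOAD DATA adds one 10-row segment to default.origin_table (hence count(1) of 40 after four loads, and 60 after two more), and the external table is pointed at origin_table's store path so new segments become visible without copying data. A sketch of the external-table step, with the location path illustrative:

    // Sketch: an external carbon table over an existing table's files,
    // assuming a SparkSession named spark.
    spark.sql(
      """CREATE EXTERNAL TABLE external_table
        |STORED AS carbondata
        |LOCATION '/path/to/store/default/origin_table'""".stripMargin)

    // Reflects all segments currently at the location, including ones
    // loaded after the external table was created.
    spark.sql("SELECT count(*) FROM external_table").show()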

Data:
0	robot0	0	0	9223372036854775807	0.0	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
1	robot1	1	1	9223372036854775806	0.5	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
2	robot2	2	2	9223372036854775805	1.0	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
3	robot3	3	3	9223372036854775804	1.5	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
4	robot4	4	4	9223372036854775803	2.0	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
5	robot5	5	5	9223372036854775802	2.5	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
6	robot6	6	6	9223372036854775801	3.0	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
7	robot7	7	7	9223372036854775800	3.5	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
8	robot8	8	8	9223372036854775799	4.0	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 
9	robot9	9	9	9223372036854775798	4.5	true	2019-03-01	2019-02-12 03:03:34.0	12.35	varchar	
Hello World From Carbon 

Data:
0	robot0	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	0	0	9223372036854775807	0.0	true	12.35	
1	robot1	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	1	1	9223372036854775806	0.5	true	12.35	
2	robot2	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	2	2	9223372036854775805	1.0	true	12.35	
3	robot3	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	3	3	9223372036854775804	1.5	true	12.35	
4	robot4	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	4	4	9223372036854775803	2.0	true	12.35	
5	robot5	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	5	5	9223372036854775802	2.5	true	12.35	
6	robot6	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	6	6	9223372036854775801	3.0	true	12.35	
7	robot7	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	7	7	9223372036854775800	3.5	true	12.35	
8	robot8	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	8	8	9223372036854775799	4.0	true	12.35	
9	robot9	2019-03-01	2019-02-12 03:03:34.0	varchar	Hello World From Carbon 	9	9	9223372036854775798	4.5	true	12.35	
- CarbonReaderExample
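The two "Data:" dumps above are produced by the CarbonData SDK's CarbonReader, once with the table's full schema and once with an explicit projection, which is why the column order differs between them. A minimal sketch of that API, with the store path and projection columns illustrative:

    import org.apache.carbondata.sdk.file.CarbonReader

    // Build a reader over a carbon files path; the projection controls
    // the column order, as seen in the second dump above.
    val reader: CarbonReader[AnyRef] =
      CarbonReader.builder("/path/to/carbon/files", "_temp")
        .projection(Array("id", "name"))
        .build()

    while (reader.hasNext) {
      val row = reader.readNextRow.asInstanceOf[Array[AnyRef]]
      println(row.mkString("\t"))
    }
    reader.close()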
2019-02-13 09:25:43 AUDIT audit:72 - {"time":"February 13, 2019 1:25:43 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685970633668222","opStatus":"START"}
2019-02-13 09:25:43 AUDIT audit:93 - {"time":"February 13, 2019 1:25:43 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6685970633668222","opStatus":"SUCCESS","opTime":"62 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-02-13 09:25:43 AUDIT audit:72 - {"time":"February 13, 2019 1:25:43 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685970713786341","opStatus":"START"}
2019-02-13 09:25:43 AUDIT audit:93 - {"time":"February 13, 2019 1:25:43 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6685970713786341","opStatus":"SUCCESS","opTime":"150 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"499.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+----+-----------+----------+
| ID | NAME      | SALARY   |
+----+-----------+----------+
| 1  | 'liang'   | 200000.0 |
| 2  | 'anubhav' | 20000.0  |
+----+-----------+----------+
****** Total Number Of Rows Fetched ****** 2
OK
+-----------+
| NAME      |
+-----------+
| 'liang'   |
| 'anubhav' |
+-----------+
********** Total Rows Fetched When Querying The Individual Columns ********** 2
OK
+----------+----+-----------+
| Salary   | ID | NAME      |
+----------+----+-----------+
| 200000.0 | 1  | 'liang'   |
| 20000.0  | 2  | 'anubhav' |
+----------+----+-----------+
********** Total Rows Fetched When Querying The Out Of Order Columns ********** 2
- HiveExample
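HiveExample writes default.hive_carbon_example through Spark and then reads it back through Hive; the three hand-printed tables above are a full scan, a single-column projection, and an out-of-order projection. The read side is ordinary Hive JDBC, roughly as follows (the URL and credentials are illustrative):

    import java.sql.DriverManager

    // Standard Hive JDBC against a local HiveServer2.
    Class.forName("org.apache.hive.jdbc.HiveDriver")
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "")
    val stmt = conn.createStatement()
    val rs = stmt.executeQuery("SELECT id, name, salary FROM hive_carbon_example")
    while (rs.next()) {
      println(s"${rs.getInt(1)}\t${rs.getString(2)}\t${rs.getDouble(3)}")
    }
    rs.close(); stmt.close(); conn.close()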
Run completed in 2 minutes, 30 seconds.
Total number of tests run: 20
Suites: completed 2, aborted 0
Tests: succeeded 17, failed 3, canceled 0, ignored 0, pending 0
*** 3 TESTS FAILED ***
[JENKINS] Recording test results