You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/12/28 03:17:13 UTC

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3270

See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3270/display/redirect>

------------------------------------------
[...truncated 44.37 KB...]
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Exception analyzing org.apache.carbondata.examples.SparkSessionExample$ using detector edu.umd.cs.findbugs.detect.FindBadCast2
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Failure examining basic blocks in Duplicate Branches detector
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Exception analyzing org.apache.carbondata.examples.SparkSessionExample$ using detector edu.umd.cs.findbugs.detect.FindUnrelatedTypesInGenericContainer
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
[INFO] Done FindBugs Analysis....
[INFO] 
[INFO] <<< findbugs-maven-plugin:3.0.4:check (analyze-compile) < :findbugs @ carbondata-examples <<<
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.4:check (analyze-compile) @ carbondata-examples ---
[INFO] BugInstance size is 0
[INFO] Error size is 14
[INFO] No errors/warnings found
[INFO] 
[INFO] --- maven-scala-plugin:2.15.2:compile (compile) @ carbondata-examples ---
[INFO] Checking for multiple versions of scala
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] includes = [**/*.java,**/*.scala,]
[INFO] excludes = []
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-resources-plugin:2.7:testResources (default-testResources) @ carbondata-examples ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/src/test/resources>
[INFO] Copying 3 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.2:testCompile (default-testCompile) @ carbondata-examples ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-surefire-plugin:2.18:test (default-test) @ carbondata-examples ---
[JENKINS] Recording test results
[INFO] 
[INFO] --- maven-scala-plugin:2.15.2:testCompile (testCompile) @ carbondata-examples ---
[INFO] Checking for multiple versions of scala
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] includes = [**/*.java,**/*.scala,]
[INFO] excludes = []
[INFO] <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/src/test/scala>:-1: info: compiling
[INFO] Compiling 1 source files to <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target/test-classes> at 1545966977830
[INFO] prepare-compile in 0 s
[INFO] compile in 6 s
[INFO] 
[INFO] --- scalatest-maven-plugin:1.0:test (test) @ carbondata-examples ---
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=512m; support was removed in 8.0
Discovery starting.
2018-12-28 03:16:25 INFO  CarbonProperties:707 - Property file path: <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/../../../conf/carbon.properties>
2018-12-28 03:16:25 INFO  CarbonProperties:900 - ------Using Carbon.properties --------
2018-12-28 03:16:25 INFO  CarbonProperties:901 - {}
2018-12-28 03:16:25 INFO  CarbonProperties:693 - Considered file format is: V3
2018-12-28 03:16:25 INFO  CarbonProperties:587 - Blocklet Size Configured value is "64"
2018-12-28 03:16:25 WARN  CarbonProperties:465 - The enable unsafe sort value "null" is invalid. Using the default value "true"
2018-12-28 03:16:25 WARN  CarbonProperties:477 - The enable off heap sort value "null" is invalid. Using the default value "true"
2018-12-28 03:16:25 WARN  CarbonProperties:438 - The custom block distribution value "null" is invalid. Using the default value "false"
2018-12-28 03:16:25 WARN  CarbonProperties:425 - The enable vector reader value "null" is invalid. Using the default value "true"
2018-12-28 03:16:26 WARN  CarbonProperties:453 - The carbon task distribution value "null" is invalid. Using the default value "block"
2018-12-28 03:16:26 WARN  CarbonProperties:556 - The enable auto handoff value "null" is invalid. Using the default value "true"
2018-12-28 03:16:26 WARN  CarbonProperties:1298 - The specified value for property 512 is invalid.
2018-12-28 03:16:26 WARN  CarbonProperties:1309 - The specified value for property carbon.sort.storage.inmemory.size.inmb is invalid. Taking the default value: 512
2018-12-28 03:16:26 INFO  CarbonProperties:1512 - Considered value for min max byte limit for string is: 200
2018-12-28 03:16:26 INFO  TestQueryExecutor$:70 - project path: /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.1
2018-12-28 03:16:26 INFO  TestQueryExecutor$:148 - Store path taken <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target//store>
2018-12-28 03:16:26 INFO  TestQueryExecutor$:149 - Warehouse path taken <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target//warehouse>
2018-12-28 03:16:26 INFO  TestQueryExecutor$:150 - Resource path taken /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.1/integration/spark-common-test/src/test/resources
2018-12-28 03:16:26 INFO  Spark2TestQueryExecutor$:44 - use TestQueryExecutorImplV2
*** RUN ABORTED ***
  java.lang.ExceptionInInitializerError:
  at org.apache.spark.sql.test.Spark2TestQueryExecutor.sqlContext(Spark2TestQueryExecutor.scala:37)
  at org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
  at org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
  at java.lang.Class.newInstance(Class.java:442)
  at org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
  at org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
  ...
  Cause: java.util.NoSuchElementException: None.get
  at scala.None$.get(Option.scala:347)
  at scala.None$.get(Option.scala:345)
  at org.apache.spark.util.CarbonReflectionUtils$.updateCarbonSerdeInfo(CarbonReflectionUtils.scala:346)
  at org.apache.spark.sql.CarbonSession$CarbonBuilder.getOrCreateCarbonSession(CarbonSession.scala:183)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor$.<init>(Spark2TestQueryExecutor.scala:70)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor$.<clinit>(Spark2TestQueryExecutor.scala)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor.sqlContext(Spark2TestQueryExecutor.scala:37)
  at org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
  at org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  ...
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3286

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3286/display/redirect>

------------------------------------------
[...truncated 267.22 KB...]
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good4|who and name5|
|which test1 good2|who and name5|
|which test1 good0|who and name5|
|which test1 good0|who and name0|
|which test1 good4|who and name5|
|which test1 good2|who and name5|
|which test1 good0|who and name5|
|which test1 good0|who and name0|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good4|who and name5|
|which test1 good2|who and name5|
|which test1 good0|who and name5|
|which test1 good0|who and name0|
|which test1 good4|who and name5|
|which test1 good2|who and name5|
|which test1 good0|who and name5|
|which test1 good0|who and name0|
+-----------------+-------------+

2019-01-02 15:26:27 AUDIT audit:72 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3078814790565858","opStatus":"START"}
2019-01-02 15:26:27 AUDIT audit:93 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3078814790565858","opStatus":"SUCCESS","opTime":"108 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2019-01-02 15:26:27 AUDIT audit:72 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3078814924802858","opStatus":"START"}
2019-01-02 15:26:27 AUDIT audit:93 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3078814924802858","opStatus":"SUCCESS","opTime":"67 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-02 15:26:27 AUDIT audit:72 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078814996911678","opStatus":"START"}
2019-01-02 15:26:27 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 15:26:27 AUDIT audit:93 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078814996911678","opStatus":"SUCCESS","opTime":"170 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 15:26:27 AUDIT audit:72 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815173003741","opStatus":"START"}
2019-01-02 15:26:27 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 15:26:27 AUDIT audit:93 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815173003741","opStatus":"SUCCESS","opTime":"177 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 15:26:27 AUDIT audit:72 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815358178807","opStatus":"START"}
2019-01-02 15:26:27 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 15:26:27 AUDIT audit:93 - {"time":"January 2, 2019 7:26:27 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815358178807","opStatus":"SUCCESS","opTime":"165 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 15:26:28 AUDIT audit:72 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815530423097","opStatus":"START"}
2019-01-02 15:26:28 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 15:26:28 AUDIT audit:93 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815530423097","opStatus":"SUCCESS","opTime":"199 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-02 15:26:28 AUDIT audit:72 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3078815814013093","opStatus":"START"}
2019-01-02 15:26:28 AUDIT audit:93 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3078815814013093","opStatus":"SUCCESS","opTime":"70 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-02 15:26:28 AUDIT audit:72 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815944032852","opStatus":"START"}
2019-01-02 15:26:28 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 15:26:28 AUDIT audit:93 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078815944032852","opStatus":"SUCCESS","opTime":"158 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 15:26:28 AUDIT audit:72 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078816106881411","opStatus":"START"}
2019-01-02 15:26:28 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 15:26:28 AUDIT audit:93 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078816106881411","opStatus":"SUCCESS","opTime":"159 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-02 15:26:28 AUDIT audit:72 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3078816343368957","opStatus":"START"}
2019-01-02 15:26:28 AUDIT audit:93 - {"time":"January 2, 2019 7:26:28 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3078816343368957","opStatus":"SUCCESS","opTime":"51 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-02 15:26:29 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_3078816470739491

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-02 15:26:29 AUDIT audit:72 - {"time":"January 2, 2019 7:26:29 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3078816624951073","opStatus":"START"}
2019-01-02 15:26:29 AUDIT audit:93 - {"time":"January 2, 2019 7:26:29 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3078816624951073","opStatus":"SUCCESS","opTime":"83 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-02 15:26:29 AUDIT audit:72 - {"time":"January 2, 2019 7:26:29 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078816727891425","opStatus":"START"}
2019-01-02 15:26:29 AUDIT audit:93 - {"time":"January 2, 2019 7:26:29 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3078816727891425","opStatus":"SUCCESS","opTime":"161 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 38 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3285

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3285/display/redirect>

------------------------------------------
[...truncated 267.38 KB...]
|which test1 good4|who and name4|
|which test1 good7|who and name1|
|which test1 good7|who and name5|
|which test1 good0|who and name2|
|which test1 good7|who and name3|
|which test1 good4|who and name4|
|which test1 good7|who and name1|
|which test1 good7|who and name5|
|which test1 good0|who and name2|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good7|who and name3|
|which test1 good4|who and name4|
|which test1 good7|who and name1|
|which test1 good7|who and name5|
|which test1 good0|who and name2|
|which test1 good7|who and name3|
|which test1 good4|who and name4|
|which test1 good7|who and name1|
|which test1 good7|who and name5|
|which test1 good0|who and name2|
+-----------------+-------------+

2019-01-02 09:23:45 AUDIT audit:72 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3057260344119375","opStatus":"START"}
2019-01-02 09:23:45 AUDIT audit:93 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3057260344119375","opStatus":"SUCCESS","opTime":"154 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2019-01-02 09:23:45 AUDIT audit:72 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3057260513382728","opStatus":"START"}
2019-01-02 09:23:45 AUDIT audit:93 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3057260513382728","opStatus":"SUCCESS","opTime":"66 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-02 09:23:45 AUDIT audit:72 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057260583744070","opStatus":"START"}
2019-01-02 09:23:45 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 09:23:45 AUDIT audit:93 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057260583744070","opStatus":"SUCCESS","opTime":"176 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 09:23:45 AUDIT audit:72 - {"time":"January 2, 2019 1:23:45 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057260765177922","opStatus":"START"}
2019-01-02 09:23:46 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 09:23:46 AUDIT audit:93 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057260765177922","opStatus":"SUCCESS","opTime":"169 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 09:23:46 AUDIT audit:72 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057260940440141","opStatus":"START"}
2019-01-02 09:23:46 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 09:23:46 AUDIT audit:93 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057260940440141","opStatus":"SUCCESS","opTime":"162 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 09:23:46 AUDIT audit:72 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057261108718934","opStatus":"START"}
2019-01-02 09:23:46 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 09:23:46 AUDIT audit:93 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057261108718934","opStatus":"SUCCESS","opTime":"170 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-02 09:23:46 AUDIT audit:72 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3057261349056959","opStatus":"START"}
2019-01-02 09:23:46 AUDIT audit:93 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3057261349056959","opStatus":"SUCCESS","opTime":"55 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-02 09:23:46 AUDIT audit:72 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057261471245643","opStatus":"START"}
2019-01-02 09:23:46 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 09:23:46 AUDIT audit:93 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057261471245643","opStatus":"SUCCESS","opTime":"165 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 09:23:46 AUDIT audit:72 - {"time":"January 2, 2019 1:23:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057261642715584","opStatus":"START"}
2019-01-02 09:23:46 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 09:23:47 AUDIT audit:93 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057261642715584","opStatus":"SUCCESS","opTime":"156 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-02 09:23:47 AUDIT audit:72 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3057261857520180","opStatus":"START"}
2019-01-02 09:23:47 AUDIT audit:93 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"3057261857520180","opStatus":"SUCCESS","opTime":"39 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-02 09:23:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_3057261992976589

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-02 09:23:47 AUDIT audit:72 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3057262139191649","opStatus":"START"}
2019-01-02 09:23:47 AUDIT audit:93 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"3057262139191649","opStatus":"SUCCESS","opTime":"73 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-02 09:23:47 AUDIT audit:72 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057262231522433","opStatus":"START"}
2019-01-02 09:23:47 AUDIT audit:93 - {"time":"January 2, 2019 1:23:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"3057262231522433","opStatus":"SUCCESS","opTime":"149 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 31 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3284

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3284/display/redirect>

------------------------------------------
[...truncated 271.49 KB...]
|      40|
+--------+

2019-01-02 08:00:46 AUDIT audit:72 - {"time":"January 2, 2019 12:00:46 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6173127523117489","opStatus":"START"}
2019-01-02 08:00:46 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 08:00:47 AUDIT audit:93 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6173127523117489","opStatus":"SUCCESS","opTime":"187 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-02 08:00:47 AUDIT audit:72 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6173127738583917","opStatus":"START"}
2019-01-02 08:00:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-02 08:00:47 AUDIT audit:93 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6173127738583917","opStatus":"SUCCESS","opTime":"186 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-02 08:00:47 AUDIT audit:72 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6173127984431170","opStatus":"START"}
2019-01-02 08:00:47 AUDIT audit:93 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6173127984431170","opStatus":"SUCCESS","opTime":"62 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-02 08:00:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_6173128178460653

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-02 08:00:47 AUDIT audit:72 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6173128343752924","opStatus":"START"}
2019-01-02 08:00:47 AUDIT audit:93 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6173128343752924","opStatus":"SUCCESS","opTime":"125 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-02 08:00:47 AUDIT audit:72 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6173128497629888","opStatus":"START"}
2019-01-02 08:00:47 AUDIT audit:93 - {"time":"January 2, 2019 12:00:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6173128497629888","opStatus":"SUCCESS","opTime":"200 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
2019-01-02 08:01:27 ERROR OperationLog:166 - Failed to remove corresponding log file of operation: OperationHandle [opType=EXECUTE_STATEMENT, getHandleIdentifier()=f410ec33-0b23-49f9-8436-d1decc26b62e]
java.io.FileNotFoundException: File does not exist: /tmp/jenkins/operation_logs/934121b5-9057-408f-93be-eb9c6780537b/f410ec33-0b23-49f9-8436-d1decc26b62e
	at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2275)
	at org.apache.hadoop.hive.ql.session.OperationLog$LogFile.remove(OperationLog.java:163)
	at org.apache.hadoop.hive.ql.session.OperationLog.close(OperationLog.java:121)
	at org.apache.hive.service.cli.operation.Operation.cleanupOperationLog(Operation.java:269)
	at org.apache.hive.service.cli.operation.SQLOperation.close(SQLOperation.java:317)
	at org.apache.hive.service.cli.operation.OperationManager.closeOperation(OperationManager.java:204)
	at org.apache.hive.service.cli.session.HiveSessionImpl.close(HiveSessionImpl.java:557)
	at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.close(HiveSessionImplwithUGI.java:110)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
	at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
	at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
	at com.sun.proxy.$Proxy39.close(Unknown Source)
	at org.apache.hive.service.cli.session.SessionManager.closeSession(SessionManager.java:292)
	at org.apache.hive.service.cli.CLIService.closeSession(CLIService.java:237)
	at org.apache.hive.service.cli.thrift.ThriftCLIService$1.deleteContext(ThriftCLIService.java:120)
	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:300)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2019-01-02 08:01:27 ERROR HiveSessionImpl:591 - Failed to cleanup session log dir: SessionHandle [934121b5-9057-408f-93be-eb9c6780537b]
java.io.FileNotFoundException: File does not exist: /tmp/jenkins/operation_logs/934121b5-9057-408f-93be-eb9c6780537b
	at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2275)
	at org.apache.hive.service.cli.session.HiveSessionImpl.cleanupSessionLogDir(HiveSessionImpl.java:589)
	at org.apache.hive.service.cli.session.HiveSessionImpl.close(HiveSessionImpl.java:561)
	at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.close(HiveSessionImplwithUGI.java:110)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
	at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
	at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
	at com.sun.proxy.$Proxy39.close(Unknown Source)
	at org.apache.hive.service.cli.session.SessionManager.closeSession(SessionManager.java:292)
	at org.apache.hive.service.cli.CLIService.closeSession(CLIService.java:237)
	at org.apache.hive.service.cli.thrift.ThriftCLIService$1.deleteContext(ThriftCLIService.java:120)
	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:300)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 3 minutes, 6 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3283

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3283/display/redirect>

------------------------------------------
[...truncated 271.55 KB...]
|      40|
+--------+

2019-01-01 16:24:47 AUDIT audit:72 - {"time":"January 1, 2019 8:24:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2996122327324104","opStatus":"START"}
2019-01-01 16:24:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 16:24:47 AUDIT audit:93 - {"time":"January 1, 2019 8:24:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2996122327324104","opStatus":"SUCCESS","opTime":"181 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 16:24:47 AUDIT audit:72 - {"time":"January 1, 2019 8:24:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2996122515452304","opStatus":"START"}
2019-01-01 16:24:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 16:24:47 AUDIT audit:93 - {"time":"January 1, 2019 8:24:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2996122515452304","opStatus":"SUCCESS","opTime":"180 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-01 16:24:47 AUDIT audit:72 - {"time":"January 1, 2019 8:24:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2996122764423440","opStatus":"START"}
2019-01-01 16:24:48 AUDIT audit:93 - {"time":"January 1, 2019 8:24:48 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2996122764423440","opStatus":"SUCCESS","opTime":"50 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-01 16:24:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_2996122907077313

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-01 16:24:48 AUDIT audit:72 - {"time":"January 1, 2019 8:24:48 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2996123053835902","opStatus":"START"}
2019-01-01 16:24:48 AUDIT audit:93 - {"time":"January 1, 2019 8:24:48 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2996123053835902","opStatus":"SUCCESS","opTime":"75 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 16:24:48 AUDIT audit:72 - {"time":"January 1, 2019 8:24:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2996123148571605","opStatus":"START"}
2019-01-01 16:24:48 AUDIT audit:93 - {"time":"January 1, 2019 8:24:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2996123148571605","opStatus":"SUCCESS","opTime":"165 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 32 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
2019-01-01 16:25:25 ERROR OperationLog:166 - Failed to remove corresponding log file of operation: OperationHandle [opType=EXECUTE_STATEMENT, getHandleIdentifier()=9f41afba-9f51-454d-9f4d-dc043fb09de3]
java.io.FileNotFoundException: File does not exist: /tmp/jenkins/operation_logs/7838440f-5922-4a1f-8278-56906088a762/9f41afba-9f51-454d-9f4d-dc043fb09de3
	at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2275)
	at org.apache.hadoop.hive.ql.session.OperationLog$LogFile.remove(OperationLog.java:163)
	at org.apache.hadoop.hive.ql.session.OperationLog.close(OperationLog.java:121)
	at org.apache.hive.service.cli.operation.Operation.cleanupOperationLog(Operation.java:269)
	at org.apache.hive.service.cli.operation.SQLOperation.close(SQLOperation.java:317)
	at org.apache.hive.service.cli.operation.OperationManager.closeOperation(OperationManager.java:204)
	at org.apache.hive.service.cli.session.HiveSessionImpl.close(HiveSessionImpl.java:557)
	at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.close(HiveSessionImplwithUGI.java:110)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
	at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
	at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
	at com.sun.proxy.$Proxy39.close(Unknown Source)
	at org.apache.hive.service.cli.session.SessionManager.closeSession(SessionManager.java:292)
	at org.apache.hive.service.cli.CLIService.closeSession(CLIService.java:237)
	at org.apache.hive.service.cli.thrift.ThriftCLIService$1.deleteContext(ThriftCLIService.java:120)
	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:300)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2019-01-01 16:25:25 ERROR HiveSessionImpl:591 - Failed to cleanup session log dir: SessionHandle [7838440f-5922-4a1f-8278-56906088a762]
java.io.FileNotFoundException: File does not exist: /tmp/jenkins/operation_logs/7838440f-5922-4a1f-8278-56906088a762
	at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2275)
	at org.apache.hive.service.cli.session.HiveSessionImpl.cleanupSessionLogDir(HiveSessionImpl.java:589)
	at org.apache.hive.service.cli.session.HiveSessionImpl.close(HiveSessionImpl.java:561)
	at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.close(HiveSessionImplwithUGI.java:110)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
	at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
	at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
	at com.sun.proxy.$Proxy39.close(Unknown Source)
	at org.apache.hive.service.cli.session.SessionManager.closeSession(SessionManager.java:292)
	at org.apache.hive.service.cli.CLIService.closeSession(CLIService.java:237)
	at org.apache.hive.service.cli.thrift.ThriftCLIService$1.deleteContext(ThriftCLIService.java:120)
	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:300)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3282

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3282/display/redirect>

------------------------------------------
[...truncated 266.96 KB...]
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good3|who and name3|
|which test1 good8|who and name0|
|which test1 good3|who and name5|
|which test1 good4|who and name0|
|which test1 good3|who and name3|
|which test1 good8|who and name0|
|which test1 good3|who and name5|
|which test1 good4|who and name0|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good3|who and name3|
|which test1 good8|who and name0|
|which test1 good3|who and name5|
|which test1 good4|who and name0|
|which test1 good3|who and name3|
|which test1 good8|who and name0|
|which test1 good3|who and name5|
|which test1 good4|who and name0|
+-----------------+-------------+

2019-01-01 12:17:51 AUDIT audit:72 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2981305773271521","opStatus":"START"}
2019-01-01 12:17:51 AUDIT audit:93 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2981305773271521","opStatus":"SUCCESS","opTime":"110 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2019-01-01 12:17:51 AUDIT audit:72 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2981305903625054","opStatus":"START"}
2019-01-01 12:17:51 AUDIT audit:93 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2981305903625054","opStatus":"SUCCESS","opTime":"78 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 12:17:51 AUDIT audit:72 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981305988441985","opStatus":"START"}
2019-01-01 12:17:51 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 12:17:51 AUDIT audit:93 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981305988441985","opStatus":"SUCCESS","opTime":"180 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 12:17:51 AUDIT audit:72 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306174598913","opStatus":"START"}
2019-01-01 12:17:51 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 12:17:51 AUDIT audit:93 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306174598913","opStatus":"SUCCESS","opTime":"168 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 12:17:51 AUDIT audit:72 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306348580915","opStatus":"START"}
2019-01-01 12:17:51 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 12:17:51 AUDIT audit:93 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306348580915","opStatus":"SUCCESS","opTime":"173 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 12:17:51 AUDIT audit:72 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306527321200","opStatus":"START"}
2019-01-01 12:17:51 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 12:17:51 AUDIT audit:93 - {"time":"January 1, 2019 4:17:51 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306527321200","opStatus":"SUCCESS","opTime":"171 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-01 12:17:52 AUDIT audit:72 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2981306784355620","opStatus":"START"}
2019-01-01 12:17:52 AUDIT audit:93 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2981306784355620","opStatus":"SUCCESS","opTime":"74 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-01 12:17:52 AUDIT audit:72 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306939161425","opStatus":"START"}
2019-01-01 12:17:52 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 12:17:52 AUDIT audit:93 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981306939161425","opStatus":"SUCCESS","opTime":"171 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 12:17:52 AUDIT audit:72 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981307128621104","opStatus":"START"}
2019-01-01 12:17:52 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 12:17:52 AUDIT audit:93 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981307128621104","opStatus":"SUCCESS","opTime":"205 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-01 12:17:52 AUDIT audit:72 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2981307404636876","opStatus":"START"}
2019-01-01 12:17:52 AUDIT audit:93 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2981307404636876","opStatus":"SUCCESS","opTime":"48 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-01 12:17:52 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_2981307553967484

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-01 12:17:52 AUDIT audit:72 - {"time":"January 1, 2019 4:17:52 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2981307693582528","opStatus":"START"}
2019-01-01 12:17:53 AUDIT audit:93 - {"time":"January 1, 2019 4:17:53 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2981307693582528","opStatus":"SUCCESS","opTime":"90 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 12:17:53 AUDIT audit:72 - {"time":"January 1, 2019 4:17:53 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981307802728456","opStatus":"START"}
2019-01-01 12:17:53 AUDIT audit:93 - {"time":"January 1, 2019 4:17:53 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2981307802728456","opStatus":"SUCCESS","opTime":"180 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 38 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3281

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3281/display/redirect>

------------------------------------------
[...truncated 267.40 KB...]
|which test1 good7|who and name2|
|which test1 good7|who and name2|
|which test1 good2|who and name2|
|which test1 good8|who and name4|
|which test1 good7|who and name0|
|which test1 good7|who and name2|
|which test1 good7|who and name2|
|which test1 good2|who and name2|
|which test1 good8|who and name4|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good7|who and name0|
|which test1 good7|who and name2|
|which test1 good7|who and name2|
|which test1 good2|who and name2|
|which test1 good8|who and name4|
|which test1 good7|who and name0|
|which test1 good7|who and name2|
|which test1 good7|who and name2|
|which test1 good2|who and name2|
|which test1 good8|who and name4|
+-----------------+-------------+

2019-01-01 09:41:20 AUDIT audit:72 - {"time":"January 1, 2019 1:41:20 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6092761521666036","opStatus":"START"}
2019-01-01 09:41:20 AUDIT audit:93 - {"time":"January 1, 2019 1:41:20 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6092761521666036","opStatus":"SUCCESS","opTime":"125 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2019-01-01 09:41:20 AUDIT audit:72 - {"time":"January 1, 2019 1:41:20 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6092761665955514","opStatus":"START"}
2019-01-01 09:41:21 AUDIT audit:93 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6092761665955514","opStatus":"SUCCESS","opTime":"90 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 09:41:21 AUDIT audit:72 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092761762993484","opStatus":"START"}
2019-01-01 09:41:21 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 09:41:21 AUDIT audit:93 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092761762993484","opStatus":"SUCCESS","opTime":"200 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 09:41:21 AUDIT audit:72 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092761969505033","opStatus":"START"}
2019-01-01 09:41:21 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 09:41:21 AUDIT audit:93 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092761969505033","opStatus":"SUCCESS","opTime":"160 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 09:41:21 AUDIT audit:72 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762135656908","opStatus":"START"}
2019-01-01 09:41:21 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 09:41:21 AUDIT audit:93 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762135656908","opStatus":"SUCCESS","opTime":"210 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 09:41:21 AUDIT audit:72 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762351589449","opStatus":"START"}
2019-01-01 09:41:21 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 09:41:21 AUDIT audit:93 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762351589449","opStatus":"SUCCESS","opTime":"162 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-01 09:41:21 AUDIT audit:72 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6092762589005932","opStatus":"START"}
2019-01-01 09:41:21 AUDIT audit:93 - {"time":"January 1, 2019 1:41:21 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6092762589005932","opStatus":"SUCCESS","opTime":"75 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-01 09:41:22 AUDIT audit:72 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762750432462","opStatus":"START"}
2019-01-01 09:41:22 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 09:41:22 AUDIT audit:93 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762750432462","opStatus":"SUCCESS","opTime":"168 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 09:41:22 AUDIT audit:72 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762934154950","opStatus":"START"}
2019-01-01 09:41:22 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 09:41:22 AUDIT audit:93 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092762934154950","opStatus":"SUCCESS","opTime":"217 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-01 09:41:22 AUDIT audit:72 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6092763232376247","opStatus":"START"}
2019-01-01 09:41:22 AUDIT audit:93 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6092763232376247","opStatus":"SUCCESS","opTime":"49 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-01 09:41:22 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_6092763377191062

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-01 09:41:22 AUDIT audit:72 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6092763544965522","opStatus":"START"}
2019-01-01 09:41:22 AUDIT audit:93 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6092763544965522","opStatus":"SUCCESS","opTime":"93 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 09:41:22 AUDIT audit:72 - {"time":"January 1, 2019 1:41:22 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092763662561174","opStatus":"START"}
2019-01-01 09:41:23 AUDIT audit:93 - {"time":"January 1, 2019 1:41:23 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6092763662561174","opStatus":"SUCCESS","opTime":"179 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 3 minutes, 0 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3280

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3280/display/redirect>

------------------------------------------
[...truncated 267.99 KB...]
|which test1 good9|who and name4|
|which test1 good3|who and name2|
|which test1 good7|who and name6|
|which test1 good6|who and name1|
|which test1 good8|who and name7|
|which test1 good6|who and name6|
|which test1 good2|who and name3|
|which test1 good3|who and name7|
|which test1 good9|who and name4|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good3|who and name2|
|which test1 good7|who and name6|
|which test1 good6|who and name1|
|which test1 good8|who and name7|
|which test1 good6|who and name6|
|which test1 good2|who and name3|
|which test1 good3|who and name7|
|which test1 good9|who and name4|
|which test1 good3|who and name2|
|which test1 good7|who and name6|
+-----------------+-------------+

2019-01-01 08:16:39 AUDIT audit:72 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2966833805738850","opStatus":"START"}
2019-01-01 08:16:39 AUDIT audit:93 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2966833805738850","opStatus":"SUCCESS","opTime":"110 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2019-01-01 08:16:39 AUDIT audit:72 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2966833928281348","opStatus":"START"}
2019-01-01 08:16:39 AUDIT audit:93 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2966833928281348","opStatus":"SUCCESS","opTime":"73 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 08:16:39 AUDIT audit:72 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834005885688","opStatus":"START"}
2019-01-01 08:16:39 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 08:16:39 AUDIT audit:93 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834005885688","opStatus":"SUCCESS","opTime":"173 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 08:16:39 AUDIT audit:72 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834184154206","opStatus":"START"}
2019-01-01 08:16:39 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 08:16:39 AUDIT audit:93 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834184154206","opStatus":"SUCCESS","opTime":"170 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 08:16:39 AUDIT audit:72 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834363979447","opStatus":"START"}
2019-01-01 08:16:39 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 08:16:39 AUDIT audit:93 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834363979447","opStatus":"SUCCESS","opTime":"182 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 08:16:39 AUDIT audit:72 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834557507994","opStatus":"START"}
2019-01-01 08:16:39 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 08:16:39 AUDIT audit:93 - {"time":"January 1, 2019 12:16:39 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834557507994","opStatus":"SUCCESS","opTime":"200 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-01 08:16:40 AUDIT audit:72 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2966834838782993","opStatus":"START"}
2019-01-01 08:16:40 AUDIT audit:93 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2966834838782993","opStatus":"SUCCESS","opTime":"71 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2019-01-01 08:16:40 AUDIT audit:72 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834980225916","opStatus":"START"}
2019-01-01 08:16:40 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 08:16:40 AUDIT audit:93 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966834980225916","opStatus":"SUCCESS","opTime":"173 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2019-01-01 08:16:40 AUDIT audit:72 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966835162792192","opStatus":"START"}
2019-01-01 08:16:40 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2019-01-01 08:16:40 AUDIT audit:93 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966835162792192","opStatus":"SUCCESS","opTime":"212 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2019-01-01 08:16:40 AUDIT audit:72 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2966835437171105","opStatus":"START"}
2019-01-01 08:16:40 AUDIT audit:93 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2966835437171105","opStatus":"SUCCESS","opTime":"48 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2019-01-01 08:16:40 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_2966835570612809

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2019-01-01 08:16:40 AUDIT audit:72 - {"time":"January 1, 2019 12:16:40 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2966835704468231","opStatus":"START"}
2019-01-01 08:16:41 AUDIT audit:93 - {"time":"January 1, 2019 12:16:41 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2966835704468231","opStatus":"SUCCESS","opTime":"72 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-01-01 08:16:41 AUDIT audit:72 - {"time":"January 1, 2019 12:16:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966835794779921","opStatus":"START"}
2019-01-01 08:16:41 AUDIT audit:93 - {"time":"January 1, 2019 12:16:41 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2966835794779921","opStatus":"SUCCESS","opTime":"152 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 36 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3279

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3279/display/redirect>

------------------------------------------
[...truncated 268.42 KB...]
|which test1 good7|who and name6|
|which test1 good2|who and name0|
|which test1 good7|who and name3|
|which test1 good3|who and name2|
|which test1 good5|who and name4|
|which test1 good8|who and name5|
|which test1 good7|who and name1|
|which test1 good7|who and name3|
|which test1 good7|who and name6|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good1|who and name2|
|which test1 good2|who and name0|
|which test1 good1|who and name1|
|which test1 good7|who and name3|
|which test1 good3|who and name2|
|which test1 good5|who and name4|
|which test1 good8|who and name5|
|which test1 good7|who and name1|
|which test1 good7|who and name3|
|which test1 good7|who and name6|
+-----------------+-------------+

2018-12-31 09:31:06 AUDIT audit:72 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6005747143419150","opStatus":"START"}
2018-12-31 09:31:06 AUDIT audit:93 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6005747143419150","opStatus":"SUCCESS","opTime":"124 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-31 09:31:06 AUDIT audit:72 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6005747291550204","opStatus":"START"}
2018-12-31 09:31:06 AUDIT audit:93 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6005747291550204","opStatus":"SUCCESS","opTime":"75 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-31 09:31:06 AUDIT audit:72 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747372452837","opStatus":"START"}
2018-12-31 09:31:06 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:31:06 AUDIT audit:93 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747372452837","opStatus":"SUCCESS","opTime":"186 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:31:06 AUDIT audit:72 - {"time":"December 31, 2018 1:31:06 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747565630803","opStatus":"START"}
2018-12-31 09:31:06 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:31:07 AUDIT audit:93 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747565630803","opStatus":"SUCCESS","opTime":"184 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:31:07 AUDIT audit:72 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747758294161","opStatus":"START"}
2018-12-31 09:31:07 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:31:07 AUDIT audit:93 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747758294161","opStatus":"SUCCESS","opTime":"199 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:31:07 AUDIT audit:72 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747964197690","opStatus":"START"}
2018-12-31 09:31:07 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:31:07 AUDIT audit:93 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005747964197690","opStatus":"SUCCESS","opTime":"195 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-31 09:31:07 AUDIT audit:72 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6005748271066653","opStatus":"START"}
2018-12-31 09:31:07 AUDIT audit:93 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6005748271066653","opStatus":"SUCCESS","opTime":"78 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-31 09:31:07 AUDIT audit:72 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005748428642447","opStatus":"START"}
2018-12-31 09:31:07 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:31:07 AUDIT audit:93 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005748428642447","opStatus":"SUCCESS","opTime":"174 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:31:07 AUDIT audit:72 - {"time":"December 31, 2018 1:31:07 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005748609877712","opStatus":"START"}
2018-12-31 09:31:08 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:31:08 AUDIT audit:93 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005748609877712","opStatus":"SUCCESS","opTime":"192 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-31 09:31:08 AUDIT audit:72 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6005748882443178","opStatus":"START"}
2018-12-31 09:31:08 AUDIT audit:93 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"6005748882443178","opStatus":"SUCCESS","opTime":"71 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-31 09:31:08 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_6005749078444961

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2018-12-31 09:31:08 AUDIT audit:72 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6005749229056865","opStatus":"START"}
2018-12-31 09:31:08 AUDIT audit:93 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"6005749229056865","opStatus":"SUCCESS","opTime":"88 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-31 09:31:08 AUDIT audit:72 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005749337470610","opStatus":"START"}
2018-12-31 09:31:08 AUDIT audit:93 - {"time":"December 31, 2018 1:31:08 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"6005749337470610","opStatus":"SUCCESS","opTime":"191 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 57 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3278

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3278/display/redirect>

------------------------------------------
[...truncated 267.53 KB...]
time for query on table with lucene datamap table:0.32
time for query on table without lucene datamap table:0.22
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good5|who and name2|
|which test1 good8|who and name0|
|which test1 good2|who and name2|
|which test1 good5|who and name2|
|which test1 good8|who and name0|
|which test1 good2|who and name2|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good5|who and name2|
|which test1 good8|who and name0|
|which test1 good1|who and name6|
|which test1 good2|who and name2|
|which test1 good5|who and name2|
|which test1 good8|who and name0|
|which test1 good1|who and name6|
|which test1 good2|who and name2|
+-----------------+-------------+

2018-12-31 09:24:29 AUDIT audit:72 - {"time":"December 31, 2018 1:24:29 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2884504691603221","opStatus":"START"}
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2884504691603221","opStatus":"SUCCESS","opTime":"101 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2884504807209402","opStatus":"START"}
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2884504807209402","opStatus":"SUCCESS","opTime":"74 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884504887571426","opStatus":"START"}
2018-12-31 09:24:30 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884504887571426","opStatus":"SUCCESS","opTime":"155 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505047329437","opStatus":"START"}
2018-12-31 09:24:30 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505047329437","opStatus":"SUCCESS","opTime":"143 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505196652675","opStatus":"START"}
2018-12-31 09:24:30 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505196652675","opStatus":"SUCCESS","opTime":"169 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505371122171","opStatus":"START"}
2018-12-31 09:24:30 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505371122171","opStatus":"SUCCESS","opTime":"187 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2884505619349931","opStatus":"START"}
2018-12-31 09:24:30 AUDIT audit:93 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2884505619349931","opStatus":"SUCCESS","opTime":"54 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-31 09:24:30 AUDIT audit:72 - {"time":"December 31, 2018 1:24:30 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505731704021","opStatus":"START"}
2018-12-31 09:24:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:24:31 AUDIT audit:93 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505731704021","opStatus":"SUCCESS","opTime":"158 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 09:24:31 AUDIT audit:72 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505896652342","opStatus":"START"}
2018-12-31 09:24:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 09:24:31 AUDIT audit:93 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884505896652342","opStatus":"SUCCESS","opTime":"174 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-31 09:24:31 AUDIT audit:72 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2884506136643702","opStatus":"START"}
2018-12-31 09:24:31 AUDIT audit:93 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2884506136643702","opStatus":"SUCCESS","opTime":"48 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-31 09:24:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_2884506260235305

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2018-12-31 09:24:31 AUDIT audit:72 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2884506415939456","opStatus":"START"}
2018-12-31 09:24:31 AUDIT audit:93 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2884506415939456","opStatus":"SUCCESS","opTime":"74 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-31 09:24:31 AUDIT audit:72 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884506511363636","opStatus":"START"}
2018-12-31 09:24:31 AUDIT audit:93 - {"time":"December 31, 2018 1:24:31 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2884506511363636","opStatus":"SUCCESS","opTime":"150 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 28 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3277

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3277/display/redirect>

------------------------------------------
[...truncated 268.36 KB...]
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good5|who and name5|
|which test1 good3|who and name1|
|which test1 good3|who and name0|
|which test1 good2|who and name1|
|which test1 good5|who and name5|
|which test1 good3|who and name1|
|which test1 good3|who and name0|
|which test1 good2|who and name1|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good5|who and name5|
|which test1 good3|who and name1|
|which test1 good3|who and name0|
|which test1 good2|who and name1|
|which test1 good5|who and name5|
|which test1 good3|who and name1|
|which test1 good3|who and name0|
|which test1 good2|who and name1|
+-----------------+-------------+

2018-12-31 05:26:30 AUDIT audit:72 - {"time":"December 30, 2018 9:26:30 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5991071327209447","opStatus":"START"}
2018-12-31 05:26:30 AUDIT audit:93 - {"time":"December 30, 2018 9:26:30 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5991071327209447","opStatus":"SUCCESS","opTime":"123 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-31 05:26:30 AUDIT audit:72 - {"time":"December 30, 2018 9:26:30 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5991071470657744","opStatus":"START"}
2018-12-31 05:26:30 AUDIT audit:93 - {"time":"December 30, 2018 9:26:30 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5991071470657744","opStatus":"SUCCESS","opTime":"93 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-31 05:26:30 AUDIT audit:72 - {"time":"December 30, 2018 9:26:30 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991071570771030","opStatus":"START"}
2018-12-31 05:26:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 05:26:31 AUDIT audit:93 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991071570771030","opStatus":"SUCCESS","opTime":"258 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 05:26:31 AUDIT audit:72 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991071835814456","opStatus":"START"}
2018-12-31 05:26:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 05:26:31 AUDIT audit:93 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991071835814456","opStatus":"SUCCESS","opTime":"174 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 05:26:31 AUDIT audit:72 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072016532154","opStatus":"START"}
2018-12-31 05:26:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 05:26:31 AUDIT audit:93 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072016532154","opStatus":"SUCCESS","opTime":"177 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 05:26:31 AUDIT audit:72 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072198628236","opStatus":"START"}
2018-12-31 05:26:31 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 05:26:31 AUDIT audit:93 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072198628236","opStatus":"SUCCESS","opTime":"175 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-31 05:26:31 AUDIT audit:72 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5991072446058721","opStatus":"START"}
2018-12-31 05:26:31 AUDIT audit:93 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5991072446058721","opStatus":"SUCCESS","opTime":"68 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-31 05:26:31 AUDIT audit:72 - {"time":"December 30, 2018 9:26:31 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072579596901","opStatus":"START"}
2018-12-31 05:26:32 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 05:26:32 AUDIT audit:93 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072579596901","opStatus":"SUCCESS","opTime":"204 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-31 05:26:32 AUDIT audit:72 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072795147337","opStatus":"START"}
2018-12-31 05:26:32 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-31 05:26:32 AUDIT audit:93 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991072795147337","opStatus":"SUCCESS","opTime":"212 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-31 05:26:32 AUDIT audit:72 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5991073090792854","opStatus":"START"}
2018-12-31 05:26:32 AUDIT audit:93 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5991073090792854","opStatus":"SUCCESS","opTime":"55 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-31 05:26:32 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_5991073255701932

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
null
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
- CarbonReaderExample
2018-12-31 05:26:32 AUDIT audit:72 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5991073421993233","opStatus":"START"}
2018-12-31 05:26:32 AUDIT audit:93 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5991073421993233","opStatus":"SUCCESS","opTime":"94 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-31 05:26:32 AUDIT audit:72 - {"time":"December 30, 2018 9:26:32 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991073544954083","opStatus":"START"}
2018-12-31 05:26:33 AUDIT audit:93 - {"time":"December 30, 2018 9:26:33 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5991073544954083","opStatus":"SUCCESS","opTime":"322 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 3 minutes, 0 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3276

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3276/display/redirect>

------------------------------------------
[...truncated 268.42 KB...]
+--------+

time for query on table with lucene datamap table:0.247
time for query on table without lucene datamap table:0.176
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good9|who and name5|
|which test1 good9|who and name7|
|which test1 good4|who and name5|
|which test1 good9|who and name5|
|which test1 good9|who and name7|
|which test1 good4|who and name5|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good9|who and name5|
|which test1 good9|who and name7|
|which test1 good4|who and name5|
|which test1 good9|who and name5|
|which test1 good9|who and name7|
|which test1 good4|who and name5|
+-----------------+-------------+

2018-12-30 09:25:04 AUDIT audit:72 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2798138926340929","opStatus":"START"}
2018-12-30 09:25:04 AUDIT audit:93 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2798138926340929","opStatus":"SUCCESS","opTime":"86 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-30 09:25:04 AUDIT audit:72 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2798139024082420","opStatus":"START"}
2018-12-30 09:25:04 AUDIT audit:93 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2798139024082420","opStatus":"SUCCESS","opTime":"56 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-30 09:25:04 AUDIT audit:72 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139084557089","opStatus":"START"}
2018-12-30 09:25:04 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-30 09:25:04 AUDIT audit:93 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139084557089","opStatus":"SUCCESS","opTime":"151 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:25:04 AUDIT audit:72 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139242636661","opStatus":"START"}
2018-12-30 09:25:04 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-30 09:25:04 AUDIT audit:93 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139242636661","opStatus":"SUCCESS","opTime":"158 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:25:04 AUDIT audit:72 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139406622908","opStatus":"START"}
2018-12-30 09:25:04 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-30 09:25:04 AUDIT audit:93 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139406622908","opStatus":"SUCCESS","opTime":"144 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:25:04 AUDIT audit:72 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139557364590","opStatus":"START"}
2018-12-30 09:25:04 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-30 09:25:04 AUDIT audit:93 - {"time":"December 30, 2018 1:25:04 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139557364590","opStatus":"SUCCESS","opTime":"159 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-30 09:25:05 AUDIT audit:72 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2798139783953089","opStatus":"START"}
2018-12-30 09:25:05 AUDIT audit:93 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2798139783953089","opStatus":"SUCCESS","opTime":"53 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-30 09:25:05 AUDIT audit:72 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139889086281","opStatus":"START"}
2018-12-30 09:25:05 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-30 09:25:05 AUDIT audit:93 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798139889086281","opStatus":"SUCCESS","opTime":"148 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-30 09:25:05 AUDIT audit:72 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798140042817783","opStatus":"START"}
2018-12-30 09:25:05 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-30 09:25:05 AUDIT audit:93 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798140042817783","opStatus":"SUCCESS","opTime":"160 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-30 09:25:05 AUDIT audit:72 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2798140272948292","opStatus":"START"}
2018-12-30 09:25:05 AUDIT audit:93 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2798140272948292","opStatus":"SUCCESS","opTime":"42 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-30 09:25:05 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_2798140397397222

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2018-12-30 09:25:05 AUDIT audit:72 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2798140520146785","opStatus":"START"}
2018-12-30 09:25:05 AUDIT audit:93 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2798140520146785","opStatus":"SUCCESS","opTime":"76 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-30 09:25:05 AUDIT audit:72 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798140612817114","opStatus":"START"}
2018-12-30 09:25:05 AUDIT audit:93 - {"time":"December 30, 2018 1:25:05 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2798140612817114","opStatus":"SUCCESS","opTime":"128 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 30 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3275

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3275/display/redirect>

------------------------------------------
[...truncated 267.95 KB...]
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good5|who and name6|
|which test1 good9|who and name7|
|which test1 good0|who and name1|
|which test1 good6|who and name0|
|which test1 good5|who and name6|
|which test1 good9|who and name7|
|which test1 good0|who and name1|
|which test1 good6|who and name0|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good5|who and name6|
|which test1 good9|who and name7|
|which test1 good0|who and name1|
|which test1 good6|who and name0|
|which test1 good5|who and name6|
|which test1 good9|who and name7|
|which test1 good0|who and name1|
|which test1 good6|who and name0|
+-----------------+-------------+

2018-12-29 18:16:48 AUDIT audit:72 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"5801458609490635","opStatus":"START"}
2018-12-29 18:16:48 AUDIT audit:93 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"5801458609490635","opStatus":"SUCCESS","opTime":"79 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-29 18:16:48 AUDIT audit:72 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5801458700264488","opStatus":"START"}
2018-12-29 18:16:48 AUDIT audit:93 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5801458700264488","opStatus":"SUCCESS","opTime":"57 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-29 18:16:48 AUDIT audit:72 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801458761942328","opStatus":"START"}
2018-12-29 18:16:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 18:16:48 AUDIT audit:93 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801458761942328","opStatus":"SUCCESS","opTime":"170 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 18:16:48 AUDIT audit:72 - {"time":"December 29, 2018 10:16:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801458938839682","opStatus":"START"}
2018-12-29 18:16:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801458938839682","opStatus":"SUCCESS","opTime":"163 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 18:16:49 AUDIT audit:72 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459107258328","opStatus":"START"}
2018-12-29 18:16:49 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459107258328","opStatus":"SUCCESS","opTime":"157 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 18:16:49 AUDIT audit:72 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459270620510","opStatus":"START"}
2018-12-29 18:16:49 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459270620510","opStatus":"SUCCESS","opTime":"167 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-29 18:16:49 AUDIT audit:72 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5801459507352882","opStatus":"START"}
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5801459507352882","opStatus":"SUCCESS","opTime":"47 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-29 18:16:49 AUDIT audit:72 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459610640276","opStatus":"START"}
2018-12-29 18:16:49 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459610640276","opStatus":"SUCCESS","opTime":"163 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 18:16:49 AUDIT audit:72 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459780234894","opStatus":"START"}
2018-12-29 18:16:49 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801459780234894","opStatus":"SUCCESS","opTime":"176 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-29 18:16:49 AUDIT audit:72 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"5801460029826322","opStatus":"START"}
2018-12-29 18:16:49 AUDIT audit:93 - {"time":"December 29, 2018 10:16:49 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"5801460029826322","opStatus":"SUCCESS","opTime":"31 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-29 18:16:50 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_5801460122229646

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
null
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
- CarbonReaderExample
2018-12-29 18:16:50 AUDIT audit:72 - {"time":"December 29, 2018 10:16:50 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5801460255081335","opStatus":"START"}
2018-12-29 18:16:50 AUDIT audit:93 - {"time":"December 29, 2018 10:16:50 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5801460255081335","opStatus":"SUCCESS","opTime":"57 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-29 18:16:50 AUDIT audit:72 - {"time":"December 29, 2018 10:16:50 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801460334395555","opStatus":"START"}
2018-12-29 18:16:50 AUDIT audit:93 - {"time":"December 29, 2018 10:16:50 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"5801460334395555","opStatus":"SUCCESS","opTime":"164 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 1 minute, 59 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3274

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3274/display/redirect>

------------------------------------------
[...truncated 267.83 KB...]
+--------+

time for query on table with lucene datamap table:0.269
time for query on table without lucene datamap table:0.185
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good8|who and name5|
|which test1 good5|who and name2|
|which test1 good2|who and name3|
|which test1 good8|who and name5|
|which test1 good5|who and name2|
|which test1 good2|who and name3|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good8|who and name5|
|which test1 good5|who and name2|
|which test1 good2|who and name3|
|which test1 good8|who and name5|
|which test1 good5|who and name2|
|which test1 good2|who and name3|
+-----------------+-------------+

2018-12-29 09:23:47 AUDIT audit:72 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2711662301183370","opStatus":"START"}
2018-12-29 09:23:47 AUDIT audit:93 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2711662301183370","opStatus":"SUCCESS","opTime":"95 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-29 09:23:47 AUDIT audit:72 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2711662412752199","opStatus":"START"}
2018-12-29 09:23:47 AUDIT audit:93 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2711662412752199","opStatus":"SUCCESS","opTime":"64 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-29 09:23:47 AUDIT audit:72 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662481730880","opStatus":"START"}
2018-12-29 09:23:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 09:23:47 AUDIT audit:93 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662481730880","opStatus":"SUCCESS","opTime":"144 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 09:23:47 AUDIT audit:72 - {"time":"December 29, 2018 1:23:47 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662630601041","opStatus":"START"}
2018-12-29 09:23:47 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 09:23:48 AUDIT audit:93 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662630601041","opStatus":"SUCCESS","opTime":"140 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 09:23:48 AUDIT audit:72 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662776200398","opStatus":"START"}
2018-12-29 09:23:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 09:23:48 AUDIT audit:93 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662776200398","opStatus":"SUCCESS","opTime":"162 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 09:23:48 AUDIT audit:72 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662944473577","opStatus":"START"}
2018-12-29 09:23:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 09:23:48 AUDIT audit:93 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711662944473577","opStatus":"SUCCESS","opTime":"182 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-29 09:23:48 AUDIT audit:72 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2711663202515827","opStatus":"START"}
2018-12-29 09:23:48 AUDIT audit:93 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2711663202515827","opStatus":"SUCCESS","opTime":"72 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-29 09:23:48 AUDIT audit:72 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711663334198443","opStatus":"START"}
2018-12-29 09:23:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 09:23:48 AUDIT audit:93 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711663334198443","opStatus":"SUCCESS","opTime":"170 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 09:23:48 AUDIT audit:72 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711663510780371","opStatus":"START"}
2018-12-29 09:23:48 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 09:23:48 AUDIT audit:93 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711663510780371","opStatus":"SUCCESS","opTime":"176 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-29 09:23:48 AUDIT audit:72 - {"time":"December 29, 2018 1:23:48 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2711663768754907","opStatus":"START"}
2018-12-29 09:23:49 AUDIT audit:93 - {"time":"December 29, 2018 1:23:49 AM PST","username":"jenkins","opName":"DROP TABLE","opId":"2711663768754907","opStatus":"SUCCESS","opTime":"50 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-29 09:23:49 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_2711663898118400

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2018-12-29 09:23:49 AUDIT audit:72 - {"time":"December 29, 2018 1:23:49 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2711664048306998","opStatus":"START"}
2018-12-29 09:23:49 AUDIT audit:93 - {"time":"December 29, 2018 1:23:49 AM PST","username":"jenkins","opName":"CREATE TABLE","opId":"2711664048306998","opStatus":"SUCCESS","opTime":"69 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-29 09:23:49 AUDIT audit:72 - {"time":"December 29, 2018 1:23:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711664136607820","opStatus":"START"}
2018-12-29 09:23:49 AUDIT audit:93 - {"time":"December 29, 2018 1:23:49 AM PST","username":"jenkins","opName":"LOAD DATA","opId":"2711664136607820","opStatus":"SUCCESS","opTime":"161 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 33 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3273

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3273/display/redirect>

------------------------------------------
[...truncated 267.57 KB...]
+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good9|who and name7|
|which test1 good7|who and name2|
|which test1 good6|who and name2|
|which test1 good2|who and name6|
|which test1 good9|who and name7|
|which test1 good7|who and name2|
|which test1 good6|who and name2|
|which test1 good2|who and name6|
+-----------------+-------------+

+-----------------+-------------+
|               id|         name|
+-----------------+-------------+
|which test1 good9|who and name7|
|which test1 good7|who and name2|
|which test1 good6|who and name2|
|which test1 good2|who and name6|
|which test1 good9|who and name7|
|which test1 good7|who and name2|
|which test1 good6|who and name2|
|which test1 good2|who and name6|
+-----------------+-------------+

2018-12-29 05:41:34 AUDIT audit:72 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5819174951465313","opStatus":"START"}
2018-12-29 05:41:34 AUDIT audit:93 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5819174951465313","opStatus":"SUCCESS","opTime":"129 ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2018-12-29 05:41:34 AUDIT audit:72 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5819175100899101","opStatus":"START"}
2018-12-29 05:41:34 AUDIT audit:93 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5819175100899101","opStatus":"SUCCESS","opTime":"92 ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-29 05:41:34 AUDIT audit:72 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175200079353","opStatus":"START"}
2018-12-29 05:41:34 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 05:41:34 AUDIT audit:93 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175200079353","opStatus":"SUCCESS","opTime":"211 ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 05:41:34 AUDIT audit:72 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175419067540","opStatus":"START"}
2018-12-29 05:41:34 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 05:41:34 AUDIT audit:93 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175419067540","opStatus":"SUCCESS","opTime":"172 ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 05:41:34 AUDIT audit:72 - {"time":"December 28, 2018 9:41:34 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175597659150","opStatus":"START"}
2018-12-29 05:41:35 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 05:41:35 AUDIT audit:93 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175597659150","opStatus":"SUCCESS","opTime":"175 ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 05:41:35 AUDIT audit:72 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175779242313","opStatus":"START"}
2018-12-29 05:41:35 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 05:41:35 AUDIT audit:93 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819175779242313","opStatus":"SUCCESS","opTime":"166 ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-29 05:41:35 AUDIT audit:72 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5819176034416213","opStatus":"START"}
2018-12-29 05:41:35 AUDIT audit:93 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5819176034416213","opStatus":"SUCCESS","opTime":"74 ms","table":"default.external_table","extraInfo":{"bad_record_path":"","_filelevelformat":"false","local_dictionary_enable":"true","external":"true","_external":"true","sort_columns":"","comment":""}}
+--------+
|count(1)|
+--------+
|      40|
+--------+

2018-12-29 05:41:35 AUDIT audit:72 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819176192043447","opStatus":"START"}
2018-12-29 05:41:35 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 05:41:35 AUDIT audit:93 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819176192043447","opStatus":"SUCCESS","opTime":"175 ms","table":"default.origin_table","extraInfo":{"SegmentId":"4","DataSize":"2.85KB","IndexSize":"1.38KB"}}
2018-12-29 05:41:35 AUDIT audit:72 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819176374343944","opStatus":"START"}
2018-12-29 05:41:35 ERROR DataLoadExecutor:55 - Data Load is partially success for table origin_table
2018-12-29 05:41:35 AUDIT audit:93 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819176374343944","opStatus":"SUCCESS","opTime":"180 ms","table":"default.origin_table","extraInfo":{"SegmentId":"5","DataSize":"2.85KB","IndexSize":"1.38KB"}}
+--------+
|count(1)|
+--------+
|      60|
+--------+

2018-12-29 05:41:35 AUDIT audit:72 - {"time":"December 28, 2018 9:41:35 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5819176670527305","opStatus":"START"}
2018-12-29 05:41:36 AUDIT audit:93 - {"time":"December 28, 2018 9:41:36 PM PST","username":"jenkins","opName":"DROP TABLE","opId":"5819176670527305","opStatus":"SUCCESS","opTime":"57 ms","table":"default.origin_table","extraInfo":{}}
- ExternalTableExample
2018-12-29 05:41:36 ERROR DataLoadExecutor:55 - Data Load is partially success for table _tempTable_5819176812721911

Data:
java.lang.NullPointerException
	at org.apache.carbondata.examples.sdk.CarbonReaderExample.main(CarbonReaderExample.java:118)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply$mcV$sp(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.apache.carbondata.examplesCI.RunExamples$$anonfun$19.apply(RunExamples.scala:126)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.examplesCI.RunExamples.org$scalatest$BeforeAndAfterAll$$super$run(RunExamples.scala:35)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.examplesCI.RunExamples.run(RunExamples.scala:35)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
null
- CarbonReaderExample
2018-12-29 05:41:36 AUDIT audit:72 - {"time":"December 28, 2018 9:41:36 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5819176949160810","opStatus":"START"}
2018-12-29 05:41:36 AUDIT audit:93 - {"time":"December 28, 2018 9:41:36 PM PST","username":"jenkins","opName":"CREATE TABLE","opId":"5819176949160810","opStatus":"SUCCESS","opTime":"75 ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2018-12-29 05:41:36 AUDIT audit:72 - {"time":"December 28, 2018 9:41:36 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819177043325129","opStatus":"START"}
2018-12-29 05:41:36 AUDIT audit:93 - {"time":"December 28, 2018 9:41:36 PM PST","username":"jenkins","opName":"LOAD DATA","opId":"5819177043325129","opStatus":"SUCCESS","opTime":"155 ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"921.0B","IndexSize":"498.0B"}}
+---+---------+--------+
| id|     name|  salary|
+---+---------+--------+
|  1|  'liang'|200000.0|
|  2|'anubhav'| 20000.0|
+---+---------+--------+

OK
OK
OK
OK
+---++-------++--------------+
| ID|| NAME || SALARY        |
+---++-------++--------------+
| 1 || 'liang' || 200000.0  |
+---++-------++--------------+
| 2 || 'anubhav' || 20000.0   |
+---++-------++--------------+
******Total Number Of Rows Fetched ****** 2
OK
+--------------+
| NAME         |
+---++---------+
| 'liang'    |
+---++---------+
| 'anubhav'      |
+---++---------+
 ********** Total Rows Fetched When Quering The Individual Columns **********2
OK
+---++-------++--------------+
| Salary|| ID || NAME        |
+---++-------++--------------+
| 200000.0 || 1 || 'liang'  |
+---++-------++--------------+
| 20000.0 || 2 || 'anubhav'   |
+---++-------++--------------+
 ********** Total Rows Fetched When Quering The Out Of Order Columns **********2
- HiveExample
Exception encountered when invoking run on a nested suite - Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
          *** ABORTED ***
  java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:37)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:2390)

The currently active SparkContext was created at:

(No active SparkContext.)
  at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:716)
  at org.apache.spark.SparkContext$$anonfun$parallelize$1.apply(SparkContext.scala:715)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
  at org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
  at org.apache.spark.SparkContext.parallelize(SparkContext.scala:715)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
Run completed in 2 minutes, 51 seconds.
Total number of tests run: 20
Suites: completed 1, aborted 1
Tests: succeeded 20, failed 0, canceled 0, ignored 0, pending 0
*** 1 SUITE ABORTED ***
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3272

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3272/display/redirect>

------------------------------------------
[...truncated 44.37 KB...]
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Exception analyzing org.apache.carbondata.examples.SparkSessionExample$ using detector edu.umd.cs.findbugs.detect.FindBadCast2
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Failure examining basic blocks in Duplicate Branches detector
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Exception analyzing org.apache.carbondata.examples.SparkSessionExample$ using detector edu.umd.cs.findbugs.detect.FindUnrelatedTypesInGenericContainer
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
[INFO] Done FindBugs Analysis....
[INFO] 
[INFO] <<< findbugs-maven-plugin:3.0.4:check (analyze-compile) < :findbugs @ carbondata-examples <<<
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.4:check (analyze-compile) @ carbondata-examples ---
[INFO] BugInstance size is 0
[INFO] Error size is 14
[INFO] No errors/warnings found
[INFO] 
[INFO] --- maven-scala-plugin:2.15.2:compile (compile) @ carbondata-examples ---
[INFO] Checking for multiple versions of scala
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] includes = [**/*.java,**/*.scala,]
[INFO] excludes = []
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-resources-plugin:2.7:testResources (default-testResources) @ carbondata-examples ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/src/test/resources>
[INFO] Copying 3 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.2:testCompile (default-testCompile) @ carbondata-examples ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-surefire-plugin:2.18:test (default-test) @ carbondata-examples ---
[JENKINS] Recording test results[INFO] 

[INFO] --- maven-scala-plugin:2.15.2:testCompile (testCompile) @ carbondata-examples ---
[INFO] Checking for multiple versions of scala
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] includes = [**/*.java,**/*.scala,]
[INFO] excludes = []
[INFO] <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/src/test/scala>:-1: info: compiling
[INFO] Compiling 1 source files to <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target/test-classes> at 1546000474271
[INFO] prepare-compile in 0 s
[INFO] compile in 5 s
[INFO] 
[INFO] --- scalatest-maven-plugin:1.0:test (test) @ carbondata-examples ---
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=512m; support was removed in 8.0
Discovery starting.
2018-12-28 12:34:40 INFO  CarbonProperties:707 - Property file path: <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/../../../conf/carbon.properties>
2018-12-28 12:34:40 INFO  CarbonProperties:900 - ------Using Carbon.properties --------
2018-12-28 12:34:40 INFO  CarbonProperties:901 - {}
2018-12-28 12:34:40 INFO  CarbonProperties:693 - Considered file format is: V3
2018-12-28 12:34:40 INFO  CarbonProperties:587 - Blocklet Size Configured value is "64
2018-12-28 12:34:40 WARN  CarbonProperties:465 - The enable unsafe sort value "null" is invalid. Using the default value "true
2018-12-28 12:34:40 WARN  CarbonProperties:477 - The enable off heap sort value "null" is invalid. Using the default value "true
2018-12-28 12:34:40 WARN  CarbonProperties:438 - The custom block distribution value "null" is invalid. Using the default value "false
2018-12-28 12:34:40 WARN  CarbonProperties:425 - The enable vector reader value "null" is invalid. Using the default value "true
2018-12-28 12:34:41 WARN  CarbonProperties:453 - The carbon task distribution value "null" is invalid. Using the default value "block
2018-12-28 12:34:41 WARN  CarbonProperties:556 - The enable auto handoff value "null" is invalid. Using the default value "true
2018-12-28 12:34:41 WARN  CarbonProperties:1298 - The specified value for property 512is invalid.
2018-12-28 12:34:41 WARN  CarbonProperties:1309 - The specified value for property carbon.sort.storage.inmemory.size.inmbis invalid. Taking the default value.512
2018-12-28 12:34:41 INFO  CarbonProperties:1512 - Considered value for min max byte limit for string is: 200
2018-12-28 12:34:41 INFO  TestQueryExecutor$:70 - project path: /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.1
2018-12-28 12:34:41 INFO  TestQueryExecutor$:148 - Store path taken <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target//store>
2018-12-28 12:34:41 INFO  TestQueryExecutor$:149 - Warehouse path taken <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target//warehouse>
2018-12-28 12:34:41 INFO  TestQueryExecutor$:150 - Resource path taken /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.1/integration/spark-common-test/src/test/resources
2018-12-28 12:34:41 INFO  Spark2TestQueryExecutor$:44 - use TestQueryExecutorImplV2
*** RUN ABORTED ***
  java.lang.ExceptionInInitializerError:
  at org.apache.spark.sql.test.Spark2TestQueryExecutor.sqlContext(Spark2TestQueryExecutor.scala:37)
  at org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
  at org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
  at java.lang.Class.newInstance(Class.java:442)
  at org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
  at org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
  ...
  Cause: java.util.NoSuchElementException: None.get
  at scala.None$.get(Option.scala:347)
  at scala.None$.get(Option.scala:345)
  at org.apache.spark.util.CarbonReflectionUtils$.updateCarbonSerdeInfo(CarbonReflectionUtils.scala:346)
  at org.apache.spark.sql.CarbonSession$CarbonBuilder.getOrCreateCarbonSession(CarbonSession.scala:183)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor$.<init>(Spark2TestQueryExecutor.scala:70)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor$.<clinit>(Spark2TestQueryExecutor.scala)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor.sqlContext(Spark2TestQueryExecutor.scala:37)
  at org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
  at org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  ...
[JENKINS] Recording test results

Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3271

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/3271/display/redirect>

------------------------------------------
[...truncated 44.37 KB...]
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Exception analyzing org.apache.carbondata.examples.SparkSessionExample$ using detector edu.umd.cs.findbugs.detect.FindBadCast2
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Failure examining basic blocks in Duplicate Branches detector
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
     [java]   Exception analyzing org.apache.carbondata.examples.SparkSessionExample$ using detector edu.umd.cs.findbugs.detect.FindUnrelatedTypesInGenericContainer
     [java]     java.lang.IllegalArgumentException: Can't push void
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.pushValue(TypeFrameModelingVisitor.java:241)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitInvokeInstructionCommon(TypeFrameModelingVisitor.java:726)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.visitINVOKEINTERFACE(TypeFrameModelingVisitor.java:438)
     [java]       At org.apache.bcel.generic.INVOKEINTERFACE.accept(INVOKEINTERFACE.java:137)
     [java]       At edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor.analyzeInstruction(AbstractFrameModelingVisitor.java:84)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeFrameModelingVisitor.analyzeInstruction(TypeFrameModelingVisitor.java:197)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:406)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transferInstruction(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.AbstractDataflowAnalysis.transfer(AbstractDataflowAnalysis.java:135)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:414)
     [java]       At edu.umd.cs.findbugs.ba.type.TypeAnalysis.transfer(TypeAnalysis.java:86)
     [java]       At edu.umd.cs.findbugs.ba.Dataflow.execute(Dataflow.java:376)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:83)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.TypeDataflowFactory.analyze(TypeDataflowFactory.java:43)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:160)
     [java]       At edu.umd.cs.findbugs.classfile.engine.bcel.CFGFactory.analyze(CFGFactory.java:65)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.analyzeMethod(AnalysisCache.java:369)
     [java]       At edu.umd.cs.findbugs.classfile.impl.AnalysisCache.getMethodAnalysis(AnalysisCache.java:322)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysis(ClassContext.java:1002)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getMethodAnalysisNoDataflowAnalysisException(ClassContext.java:987)
     [java]       At edu.umd.cs.findbugs.ba.ClassContext.getCFG(ClassContext.java:303)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.analyzeMethod(BuildUnconditionalParamDerefDatabase.java:115)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.considerMethod(BuildUnconditionalParamDerefDatabase.java:107)
     [java]       At edu.umd.cs.findbugs.detect.BuildUnconditionalParamDerefDatabase.visitClassContext(BuildUnconditionalParamDerefDatabase.java:91)
     [java]       At edu.umd.cs.findbugs.DetectorToDetector2Adapter.visitClass(DetectorToDetector2Adapter.java:76)
     [java]       At edu.umd.cs.findbugs.FindBugs2.analyzeApplication(FindBugs2.java:1089)
     [java]       At edu.umd.cs.findbugs.FindBugs2.execute(FindBugs2.java:283)
     [java]       At edu.umd.cs.findbugs.FindBugs.runMain(FindBugs.java:402)
     [java]       At edu.umd.cs.findbugs.FindBugs2.main(FindBugs2.java:1200)
[INFO] Done FindBugs Analysis....
[INFO] 
[INFO] <<< findbugs-maven-plugin:3.0.4:check (analyze-compile) < :findbugs @ carbondata-examples <<<
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.4:check (analyze-compile) @ carbondata-examples ---
[INFO] BugInstance size is 0
[INFO] Error size is 14
[INFO] No errors/warnings found
[INFO] 
[INFO] --- maven-scala-plugin:2.15.2:compile (compile) @ carbondata-examples ---
[INFO] Checking for multiple versions of scala
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] includes = [**/*.java,**/*.scala,]
[INFO] excludes = []
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-resources-plugin:2.7:testResources (default-testResources) @ carbondata-examples ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/src/test/resources>
[INFO] Copying 3 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.2:testCompile (default-testCompile) @ carbondata-examples ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-surefire-plugin:2.18:test (default-test) @ carbondata-examples ---
[JENKINS] Recording test results
[INFO] 
[INFO] --- maven-scala-plugin:2.15.2:testCompile (testCompile) @ carbondata-examples ---
[INFO] Checking for multiple versions of scala
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] artifact joda-time:joda-time: checking for updates from central
[INFO] artifact joda-time:joda-time: checking for updates from apache.snapshots.https
[INFO] artifact joda-time:joda-time: checking for updates from repository.jboss.org
[INFO] includes = [**/*.java,**/*.scala,]
[INFO] excludes = []
[INFO] <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/src/test/scala>:-1: info: compiling
[INFO] Compiling 1 source files to <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target/test-classes> at 1545998274799
[INFO] prepare-compile in 0 s
[INFO] compile in 5 s
[INFO] 
[INFO] --- scalatest-maven-plugin:1.0:test (test) @ carbondata-examples ---
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=512m; support was removed in 8.0
Discovery starting.
2018-12-28 11:58:01 INFO  CarbonProperties:707 - Property file path: <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/../../../conf/carbon.properties>
2018-12-28 11:58:01 INFO  CarbonProperties:900 - ------Using Carbon.properties --------
2018-12-28 11:58:01 INFO  CarbonProperties:901 - {}
2018-12-28 11:58:01 INFO  CarbonProperties:693 - Considered file format is: V3
2018-12-28 11:58:01 INFO  CarbonProperties:587 - Blocklet Size Configured value is "64"
2018-12-28 11:58:01 WARN  CarbonProperties:465 - The enable unsafe sort value "null" is invalid. Using the default value "true"
2018-12-28 11:58:01 WARN  CarbonProperties:477 - The enable off heap sort value "null" is invalid. Using the default value "true"
2018-12-28 11:58:01 WARN  CarbonProperties:438 - The custom block distribution value "null" is invalid. Using the default value "false"
2018-12-28 11:58:01 WARN  CarbonProperties:425 - The enable vector reader value "null" is invalid. Using the default value "true"
2018-12-28 11:58:01 WARN  CarbonProperties:453 - The carbon task distribution value "null" is invalid. Using the default value "block"
2018-12-28 11:58:01 WARN  CarbonProperties:556 - The enable auto handoff value "null" is invalid. Using the default value "true"
2018-12-28 11:58:01 WARN  CarbonProperties:1298 - The specified value for property 512 is invalid.
2018-12-28 11:58:01 WARN  CarbonProperties:1309 - The specified value for property carbon.sort.storage.inmemory.size.inmb is invalid. Taking the default value 512
2018-12-28 11:58:01 INFO  CarbonProperties:1512 - Considered value for min max byte limit for string is: 200
2018-12-28 11:58:01 INFO  TestQueryExecutor$:70 - project path: /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.1
2018-12-28 11:58:01 INFO  TestQueryExecutor$:148 - Store path taken <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target//store>
2018-12-28 11:58:01 INFO  TestQueryExecutor$:149 - Warehouse path taken <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-examples/ws/target//warehouse>
2018-12-28 11:58:01 INFO  TestQueryExecutor$:150 - Resource path taken /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.1/integration/spark-common-test/src/test/resources
2018-12-28 11:58:01 INFO  Spark2TestQueryExecutor$:44 - use TestQueryExecutorImplV2
*** RUN ABORTED ***
  java.lang.ExceptionInInitializerError:
  at org.apache.spark.sql.test.Spark2TestQueryExecutor.sqlContext(Spark2TestQueryExecutor.scala:37)
  at org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
  at org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
  at java.lang.Class.newInstance(Class.java:442)
  at org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:69)
  at org.scalatest.tools.DiscoverySuite$$anonfun$1.apply(DiscoverySuite.scala:38)
  ...
  Cause: java.util.NoSuchElementException: None.get
  at scala.None$.get(Option.scala:347)
  at scala.None$.get(Option.scala:345)
  at org.apache.spark.util.CarbonReflectionUtils$.updateCarbonSerdeInfo(CarbonReflectionUtils.scala:346)
  at org.apache.spark.sql.CarbonSession$CarbonBuilder.getOrCreateCarbonSession(CarbonSession.scala:183)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor$.<init>(Spark2TestQueryExecutor.scala:70)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor$.<clinit>(Spark2TestQueryExecutor.scala)
  at org.apache.spark.sql.test.Spark2TestQueryExecutor.sqlContext(Spark2TestQueryExecutor.scala:37)
  at org.apache.spark.sql.test.util.QueryTest.<init>(QueryTest.scala:115)
  at org.apache.carbondata.examplesCI.RunExamples.<init>(RunExamples.scala:35)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  ...
[JENKINS] Recording test results