You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@carbondata.apache.org by ravipesala <gi...@git.apache.org> on 2017/09/07 03:25:02 UTC

[GitHub] carbondata pull request #1332: [CARBONDATA-1456]Regenerate cached hive resul...

Github user ravipesala commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1332#discussion_r137439030
  
    --- Diff: integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala ---
    @@ -84,22 +82,34 @@ class QueryTest extends PlanTest with Suite {
         checkAnswer(df, expectedAnswer.collect())
       }
     
    -  protected def checkAnswer(carbon: String, hive: String, uniqueIdentifier:String): Unit = {
    -    val path = TestQueryExecutor.hiveresultpath + "/"+uniqueIdentifier
    +  protected def checkAnswer(carbon: String, hive: String, uniqueIdentifier: String): Unit = {
    +    val path = TestQueryExecutor.hiveresultpath + "/" + uniqueIdentifier
         if (FileFactory.isFileExist(path, FileFactory.getFileType(path))) {
    -      val objinp = new ObjectInputStream(FileFactory.getDataInputStream(path, FileFactory.getFileType(path)))
    +      val objinp = new ObjectInputStream(FileFactory
    +        .getDataInputStream(path, FileFactory.getFileType(path)))
           val rows = objinp.readObject().asInstanceOf[Array[Row]]
           objinp.close()
    -      checkAnswer(sql(carbon), rows)
    +      QueryTest.checkAnswer(sql(carbon), rows) match {
    +        case Some(errorMessage) => {
    +          FileFactory.deleteFile(path, FileFactory.getFileType(path))
    +          writeAndCheckAnswer(carbon, hive, path)
    --- End diff --
    
Doesn't it go into an endless loop when the test fails?


---