Posted to commits@hawq.apache.org by ch...@apache.org on 2021/02/23 09:38:10 UTC

[hawq] branch master updated (aa1919b -> 2a80d79)

This is an automated email from the ASF dual-hosted git repository.

chiyang10000 pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hawq.git.


    from aa1919b  HAWQ-1782. Fix failed to read EXTERNAL TABLE of GPFDIST protocol
     new 117bbfe  HAWQ-1784. Fix TestCreateTable depends on GUC setting
     new 2a80d79  HAWQ-1785. Fix HDFS metadata mismatch in GitHub Action

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/workflows/build.yml                     | 2 ++
 .github/workflows/scripts/gtest_filter_negative | 2 --
 src/Makefile.global.in                          | 5 +++--
 src/backend/cdb/cdbdatalocality.c               | 2 +-
 src/test/feature/catalog/test_create_table.cpp  | 4 ++++
 src/test/feature/lib/compent_config.cpp         | 2 ++
 src/test/feature/lib/hdfs_config.cpp            | 3 +++
 src/test/feature/lib/sql_util.cpp               | 6 +++---
 src/test/feature/lib/sql_util.h                 | 1 +
 9 files changed, 19 insertions(+), 8 deletions(-)


[hawq] 01/02: HAWQ-1784. Fix TestCreateTable depends on GUC setting

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chiyang10000 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hawq.git

commit 117bbfe7741d52f81fc3c408f8cae6c611628713
Author: Chiyang Wan <ch...@gmail.com>
AuthorDate: Tue Feb 23 10:04:07 2021 +0800

    HAWQ-1784. Fix TestCreateTable depends on GUC setting
    
    It collects saved GUC values into each SQLUtility instance.
---
 src/test/feature/catalog/test_create_table.cpp | 4 ++++
 src/test/feature/lib/sql_util.cpp              | 6 +++---
 src/test/feature/lib/sql_util.h                | 1 +
 3 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/src/test/feature/catalog/test_create_table.cpp b/src/test/feature/catalog/test_create_table.cpp
index 9219748..240f39c 100644
--- a/src/test/feature/catalog/test_create_table.cpp
+++ b/src/test/feature/catalog/test_create_table.cpp
@@ -188,6 +188,7 @@ TEST_F(TestCreateTable, TestCreateTable1) {
 TEST_F(TestCreateTable, TestCreateTableInherits) {
   hawq::test::SQLUtility util;
   // prepare
+  util.setGUCValue("default_hash_table_bucket_number", "6");
   util.execute("DROP TABLE IF EXISTS t1_1_6, t1_1_5, t1_1_4, t1_1_3, "
 		                            "t1_1_2, t1_1_1, t1_1_w, t1_1, t1 CASCADE");
 
@@ -238,6 +239,7 @@ TEST_F(TestCreateTable, TestCreateTableInherits) {
 TEST_F(TestCreateTable, TestCreateTableDistribution1) {
   hawq::test::SQLUtility util;
   // prepare
+  util.setGUCValue("default_hash_table_bucket_number", "6");
   util.execute("DROP TABLE IF EXISTS t1_3_4, t1_3_3, t1_3_2, t1_3_1, t1_3_w, t1_3 CASCADE");
   util.execute("DROP TABLE IF EXISTS t1_2_4, t1_2_3, t1_2_2, t1_2_1, t1_2_w, t1_2 CASCADE");
   util.execute("DROP TABLE IF EXISTS t1 CASCADE");
@@ -307,6 +309,7 @@ TEST_F(TestCreateTable, TestCreateTableDistribution1) {
 TEST_F(TestCreateTable, TestCreateTableDistribution2) {
   hawq::test::SQLUtility util;
   // prepare
+  util.setGUCValue("default_hash_table_bucket_number", "6");
   util.execute("DROP TABLE IF EXISTS t2_2, t2_2_w, t2_2_1, t2_2_2, t2_2_3, t2_2_4 CASCADE");
   util.execute("DROP TABLE IF EXISTS t2_3, t2_3_w, t2_3_1, t2_3_2, t2_3_3, t2_3_4 CASCADE");
   util.execute("DROP TABLE IF EXISTS t2_1_1, t2_1, t2 CASCADE");
@@ -428,6 +431,7 @@ TEST_F(TestCreateTable, TestCreateTableDistribution3) {
 TEST_F(TestCreateTable, TestCreateTableDistribution4) {
   hawq::test::SQLUtility util;
   // prepare
+  util.setGUCValue("default_hash_table_bucket_number", "6");
   util.execute("DROP TABLE IF EXISTS t4");
 
   // test
diff --git a/src/test/feature/lib/sql_util.cpp b/src/test/feature/lib/sql_util.cpp
index 1dbef13..3ad027c 100644
--- a/src/test/feature/lib/sql_util.cpp
+++ b/src/test/feature/lib/sql_util.cpp
@@ -176,7 +176,7 @@ void SQLUtility::exec(const string &sql) {
 void SQLUtility::execIgnore(const string &sql) { conn->runSQLCommand(sql); }
 
 string SQLUtility::execute(const string &sql, bool check) {
-  conn->runSQLCommand("SET SEARCH_PATH=" + schemaName + ";" + sql);
+  conn->runSQLCommand("SET SEARCH_PATH=" + schemaName + ";" + savedGUCValue + sql);
   EXPECT_NE(conn.get(), nullptr);
   if (check) {
     EXPECT_EQ(0, conn->getLastStatus()) << sql << " " << conn->getLastResult();
@@ -186,7 +186,7 @@ string SQLUtility::execute(const string &sql, bool check) {
 }
 
 bool SQLUtility::executeSql(const std::string &sql) {
-  conn->runSQLCommand("SET SEARCH_PATH=" + schemaName + ";" + sql);
+  conn->runSQLCommand("SET SEARCH_PATH=" + schemaName + ";" + savedGUCValue + sql);
   EXPECT_NE(conn.get(), nullptr);
   return !conn->getLastStatus();
 }
@@ -767,7 +767,7 @@ string SQLUtility::getTestRootPath() {
 
 void SQLUtility::setGUCValue(const std::string &guc, const std::string &value) {
   string sql = "set " + guc + " = " + value;
-  execute(sql, true);
+  savedGUCValue += sql + ';';
 }
 
 std::string SQLUtility::getGUCValue(const std::string &guc) {
diff --git a/src/test/feature/lib/sql_util.h b/src/test/feature/lib/sql_util.h
index 748ad31..3ca67a9 100644
--- a/src/test/feature/lib/sql_util.h
+++ b/src/test/feature/lib/sql_util.h
@@ -397,6 +397,7 @@ class SQLUtility {
   std::unique_ptr<hawq::test::PSQL> conn;
   std::string databaseName;
   std::string schemaName;
+  std::string savedGUCValue;
 
 };  // class SQLUtility
 

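Editorial note on the HAWQ-1784 change above: because SQLUtility prefixes every statement with "SET SEARCH_PATH=...", a GUC set through an earlier execute() call is not guaranteed to still be in effect for later statements. The patch therefore makes setGUCValue() accumulate the "set guc = value;" clause into the new savedGUCValue member and replay it in front of each command. The standalone C++ sketch below illustrates that accumulate-and-prepend pattern only; the class, member, and schema names are placeholders, not the actual hawq::test API.

    // Minimal sketch of the HAWQ-1784 pattern: record GUC assignments and
    // prepend them to every subsequent statement instead of relying on a
    // one-off SET to persist. Names are illustrative placeholders.
    #include <iostream>
    #include <string>

    class SqlSessionSketch {
     public:
      // Remember the GUC assignment; it is replayed before every later statement.
      void setGUCValue(const std::string &guc, const std::string &value) {
        savedGUCValue_ += "set " + guc + " = " + value + ";";
      }

      // Build the full command string that would be sent to the server.
      std::string buildCommand(const std::string &sql) const {
        return "SET SEARCH_PATH=" + schemaName_ + ";" + savedGUCValue_ + sql;
      }

     private:
      std::string schemaName_ = "testschema";  // placeholder schema name
      std::string savedGUCValue_;              // accumulated "set guc = value;" clauses
    };

    int main() {
      SqlSessionSketch util;
      util.setGUCValue("default_hash_table_bucket_number", "6");
      std::cout << util.buildCommand("CREATE TABLE t1 (a int);") << std::endl;
      // Prints:
      // SET SEARCH_PATH=testschema;set default_hash_table_bucket_number = 6;CREATE TABLE t1 (a int);
      return 0;
    }

With this approach the GUC travels with every statement the test issues, so test results no longer depend on the cluster-wide default for default_hash_table_bucket_number.
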

[hawq] 02/02: HAWQ-1785. Fix HDFS metadata mismatch in GitHub Action

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chiyang10000 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hawq.git

commit 2a80d792acb7f2f7abce9f158e6199dcb3a85e11
Author: Chiyang Wan <ch...@gmail.com>
AuthorDate: Tue Feb 23 10:06:16 2021 +0800

    HAWQ-1785. Fix HDFS metadata mismatch in GitHub Action
---
 .github/workflows/build.yml                     | 2 ++
 .github/workflows/scripts/gtest_filter_negative | 2 --
 src/Makefile.global.in                          | 5 +++--
 src/backend/cdb/cdbdatalocality.c               | 2 +-
 src/test/feature/lib/compent_config.cpp         | 2 ++
 src/test/feature/lib/hdfs_config.cpp            | 3 +++
 6 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 11899d1..41de4ac 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -42,6 +42,8 @@ jobs:
           fi
         done
         install_name_tool -add_rpath $GITHUB_WORKSPACE/dependency-Darwin/package/lib/perl5/5.28.0/darwin-thread-multi-2level/CORE/ $GITHUB_WORKSPACE/dependency-Darwin/package/bin/perl
+        rm -rf $GITHUB_WORKSPACE/dependency-Darwin/package/include/hdfs
+        rm -rf $GITHUB_WORKSPACE/dependency-Darwin/package/lib/libhdfs3*
 
     - name: configure
       timeout-minutes: 10
diff --git a/.github/workflows/scripts/gtest_filter_negative b/.github/workflows/scripts/gtest_filter_negative
index b20aa7f..d46d369 100644
--- a/.github/workflows/scripts/gtest_filter_negative
+++ b/.github/workflows/scripts/gtest_filter_negative
@@ -14,7 +14,6 @@
 # limitations under the License.
 
 export GTEST_FILTER_NEGATIVE=\
-TestCommonLib.TestHdfsConfig:\
 TestExtOrc.TestNormalPath:\
 TestExtOrc.BoolTypeTest:\
 TestExtOrc.DateTypeTest:\
@@ -28,7 +27,6 @@ TestVexecutor.ProjAndQual:\
 TestVexecutor.date:\
 TestVexecutor.vagg:\
 TestCloud.*:\
-TestAlterTable.*:\
 TestHawqExtract.*:\
 TestHawqRegister.*:\
 TestRangerPolicyHelper.*:\
diff --git a/src/Makefile.global.in b/src/Makefile.global.in
index 2b47748..0d98b73 100644
--- a/src/Makefile.global.in
+++ b/src/Makefile.global.in
@@ -220,8 +220,9 @@ COLLATEINDEX	= @COLLATEINDEX@
 # Compilers
 
 CPP = @CPP@
-CPPFLAGS = @CPPFLAGS@
-CPPFLAGS += -I/usr/local/hawq/include
+CPPFLAGS = -I$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/include
+CPPFLAGS += -I$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/include
+CPPFLAGS += @CPPFLAGS@
 
 ifdef PGXS
 override CPPFLAGS := -I$(includedir_server) -I$(includedir_internal) $(CPPFLAGS)
diff --git a/src/backend/cdb/cdbdatalocality.c b/src/backend/cdb/cdbdatalocality.c
index a54527e..8858735 100644
--- a/src/backend/cdb/cdbdatalocality.c
+++ b/src/backend/cdb/cdbdatalocality.c
@@ -1183,7 +1183,7 @@ static void double_check_hdfs_metadata_logic_length(BlockLocation * locations,in
 		hdfs_file_len += locations[i].length;
 	}
 	if(logic_len > hdfs_file_len) {
-		elog(ERROR, "hdfs file length does not equal to metadata logic length!");
+		elog(ERROR, "hdfs file length does not equal to metadata logic length! (%ld != %ld)", hdfs_file_len, logic_len);
 	}
 }
 
diff --git a/src/test/feature/lib/compent_config.cpp b/src/test/feature/lib/compent_config.cpp
index b018ff6..21b64c5 100644
--- a/src/test/feature/lib/compent_config.cpp
+++ b/src/test/feature/lib/compent_config.cpp
@@ -202,6 +202,8 @@ void CompentConfig::runCommandAndGetNodesPorts(
     auto lines = hawq::test::split(result, '\n');
     for (size_t i = 0; i < lines.size(); i++) {
       string valueLine = lines[i];
+      if (valueLine.find("ssh:") != string::npos)
+        continue;
       if (valueLine.find("WARNING") != string::npos)
         continue;
       auto datanodeInfo = hawq::test::split(valueLine, ':');
diff --git a/src/test/feature/lib/hdfs_config.cpp b/src/test/feature/lib/hdfs_config.cpp
index 24938bb..d5598af 100644
--- a/src/test/feature/lib/hdfs_config.cpp
+++ b/src/test/feature/lib/hdfs_config.cpp
@@ -316,6 +316,9 @@ void HdfsConfig::getNamenodes(std::vector<string> &namenodes,
     auto lines = hawq::test::split(result, '\n');
     for (size_t i = 0; i < lines.size(); i++) {
         string valueLine = lines[i];
+        if (valueLine.find("ssh:") != string::npos ||
+            valueLine.find("Warning:") != string::npos)
+          continue;
         auto namenodeInfo = hawq::test::split(valueLine, ':');
         if (namenodeInfo.size() == 2) {
             namenodes.push_back(hawq::test::trim(namenodeInfo[0]));
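
Editorial note on the parsing changes in compent_config.cpp and hdfs_config.cpp above: when the test harness gathers datanode/namenode information by running a command over ssh, diagnostics such as "ssh: connect to host ... refused" or "Warning: Permanently added ..." can be interleaved with the real "host:port" output, and splitting those lines on ':' produces bogus entries. The patch skips such lines before parsing. The standalone C++ sketch below shows the same filtering idea; the split() helper and parseHostPorts() function are illustrative stand-ins, not the actual hawq::test helpers.

    // Sketch of filtering ssh noise out of "host:port" command output before
    // parsing, as done in HAWQ-1785. Names are illustrative only.
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <utility>
    #include <vector>

    // Split a string on a delimiter (stand-in for hawq::test::split).
    static std::vector<std::string> split(const std::string &s, char delim) {
      std::vector<std::string> parts;
      std::stringstream ss(s);
      std::string item;
      while (std::getline(ss, item, delim)) parts.push_back(item);
      return parts;
    }

    // Keep only well-formed "host:port" lines, skipping ssh diagnostics.
    static std::vector<std::pair<std::string, std::string>>
    parseHostPorts(const std::string &output) {
      std::vector<std::pair<std::string, std::string>> result;
      for (const auto &line : split(output, '\n')) {
        if (line.find("ssh:") != std::string::npos) continue;      // connection errors
        if (line.find("Warning:") != std::string::npos) continue;  // host-key warnings
        auto fields = split(line, ':');
        if (fields.size() == 2) result.emplace_back(fields[0], fields[1]);
      }
      return result;
    }

    int main() {
      std::string output =
          "Warning: Permanently added 'dn1' (ECDSA) to the list of known hosts.\n"
          "dn1:50010\n"
          "ssh: connect to host dn2 port 22: Connection refused\n"
          "dn3:50010\n";
      for (const auto &hp : parseHostPorts(output))
        std::cout << hp.first << " -> " << hp.second << std::endl;  // dn1 and dn3 only
      return 0;
    }

Note that without the filter, the "Warning: ..." line splits into exactly two fields and would be mistaken for a host:port pair, which is the kind of metadata mismatch the GitHub Action run exposed.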