Posted to commits@doris.apache.org by mo...@apache.org on 2022/10/25 14:29:13 UTC

[doris] branch master updated: [improvement](test) add sync for test_agg_keys_schema_change_datev2 (#13643)

This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 6f18726f01 [improvement](test) add sync for test_agg_keys_schema_change_datev2 (#13643)
6f18726f01 is described below

commit 6f18726f013971b62ae69e975d355efa2f50e6e4
Author: Mingyu Chen <mo...@gmail.com>
AuthorDate: Tue Oct 25 22:29:05 2022 +0800

    [improvement](test) add sync for test_agg_keys_schema_change_datev2 (#13643)
    
    1. add "sync" to avoid some potential meta sync problem when running regression test on multi-node cluster
    2. Use /tmp dir as dest dir of outfile test, to avoid "No such file or directory" error.
---
 regression-test/suites/export_p0/test_outfile.groovy         |  3 ++-
 regression-test/suites/export_p0/test_outfile_expr.groovy    |  3 ++-
 regression-test/suites/export_p0/test_outfile_parquet.groovy |  3 ++-
 .../suites/export_p0/test_outfile_separator.groovy           |  3 ++-
 .../suites/nereids_syntax_p0/agg_with_const.groovy           |  1 +
 .../datev2/test_agg_keys_schema_change_datev2.groovy         | 12 ++++++++++++
 6 files changed, 21 insertions(+), 4 deletions(-)
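
For reference, a minimal Groovy sketch combining the two patterns this commit applies to the suites below. The suite and table names here are illustrative, not taken from the patch.

    suite("example_outfile_sync_sketch") {
        def tableName = "example_tbl"

        // Pattern 2: build the outfile destination under /tmp with a random
        // suffix, so the directory exists on every node and concurrent runs
        // do not collide. The real suites pass this path to the
        // SELECT ... INTO OUTFILE "file://${outFilePath}/" statement.
        def uuid = UUID.randomUUID().toString()
        def outFilePath = """/tmp/example_outfile_${uuid}"""

        // Pattern 1: issue "sync" before the checked query so the session
        // sees up-to-date metadata on a multi-node cluster.
        sql """sync"""
        qt_select """select * from ${tableName} order by 1"""
    }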

diff --git a/regression-test/suites/export_p0/test_outfile.groovy b/regression-test/suites/export_p0/test_outfile.groovy
index 1271ddad37..4fa3350268 100644
--- a/regression-test/suites/export_p0/test_outfile.groovy
+++ b/regression-test/suites/export_p0/test_outfile.groovy
@@ -49,7 +49,8 @@ suite("test_outfile") {
         return
     }
     def tableName = "outfile_test"
-    def outFilePath = """${context.file.parent}/test_outfile"""
+    def uuid = UUID.randomUUID().toString()
+    def outFilePath = """/tmp/test_outfile_${uuid}"""
     try {
         sql """ DROP TABLE IF EXISTS ${tableName} """
         sql """
diff --git a/regression-test/suites/export_p0/test_outfile_expr.groovy b/regression-test/suites/export_p0/test_outfile_expr.groovy
index 0e3f5adbb9..6649123958 100644
--- a/regression-test/suites/export_p0/test_outfile_expr.groovy
+++ b/regression-test/suites/export_p0/test_outfile_expr.groovy
@@ -49,7 +49,8 @@ suite("test_outfile_expr") {
         return
     }
     def tableName = "outfile_test_expr"
-    def outFilePath = """${context.file.parent}/tmp_expr"""
+    def uuid = UUID.randomUUID().toString()
+    def outFilePath = """/tmp/test_outfile_expr_${uuid}"""
     try {
         sql """ DROP TABLE IF EXISTS ${tableName} """
         sql """
diff --git a/regression-test/suites/export_p0/test_outfile_parquet.groovy b/regression-test/suites/export_p0/test_outfile_parquet.groovy
index ac710410e9..8b1944d2fb 100644
--- a/regression-test/suites/export_p0/test_outfile_parquet.groovy
+++ b/regression-test/suites/export_p0/test_outfile_parquet.groovy
@@ -53,7 +53,8 @@ suite("test_outfile_parquet") {
     }
     def tableName = "outfile_parquet_test"
     def tableName2 = "outfile_parquet_test2"
-    def outFilePath = """${context.file.parent}/test_outfile_parquet"""
+    def uuid = UUID.randomUUID().toString()
+    def outFilePath = """/tmp/test_outfile_parquet_${uuid}"""
     try {
         sql """ DROP TABLE IF EXISTS ${tableName} """
         sql """
diff --git a/regression-test/suites/export_p0/test_outfile_separator.groovy b/regression-test/suites/export_p0/test_outfile_separator.groovy
index 07766db680..4d11af6b09 100644
--- a/regression-test/suites/export_p0/test_outfile_separator.groovy
+++ b/regression-test/suites/export_p0/test_outfile_separator.groovy
@@ -50,7 +50,8 @@ suite("test_outfile_separator") {
     }
     def dbName = context.config.getDbNameByFile(context.file)
     def tableName = "outfile_test_separator"
-    def outFilePath = """${context.file.parent}/test_outfile_separator"""
+    def uuid = UUID.randomUUID().toString()
+    def outFilePath = """/tmp/test_outfile_separator_${uuid}"""
     try {
         sql """ DROP TABLE IF EXISTS ${tableName} """
         sql """
diff --git a/regression-test/suites/nereids_syntax_p0/agg_with_const.groovy b/regression-test/suites/nereids_syntax_p0/agg_with_const.groovy
index c1d48d7766..c5d6abe08f 100644
--- a/regression-test/suites/nereids_syntax_p0/agg_with_const.groovy
+++ b/regression-test/suites/nereids_syntax_p0/agg_with_const.groovy
@@ -38,6 +38,7 @@ suite("agg_with_const") {
 
     sql "SET enable_fallback_to_original_planner=false"
 
+    sql """sync"""
     qt_select """
         select count(2) + 1, sum(2) + sum(col1) from agg_with_const_tbl
     """
diff --git a/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy b/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy
index 6784c533a0..796793ae5a 100644
--- a/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy
+++ b/regression-test/suites/schema_change_p0/datev2/test_agg_keys_schema_change_datev2.groovy
@@ -144,10 +144,13 @@ suite("test_agg_keys_schema_change_datev2") {
             }
         }
     }
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     do_compact(tbName)
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """delete from ${tbName} where `datev3` = '2022-01-01';"""
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """ alter table ${tbName} drop column `datev3` """
     max_try_time = 1000
@@ -175,6 +178,7 @@ suite("test_agg_keys_schema_change_datev2") {
     sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');"""
     sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');"""
     sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');"""
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """ alter  table ${tbName} add column `datev3` datetimev2 DEFAULT '2022-01-01 11:11:11' """
     max_try_time = 1000
@@ -189,10 +193,13 @@ suite("test_agg_keys_schema_change_datev2") {
             }
         }
     }
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     do_compact(tbName)
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """delete from ${tbName} where `datev3` = '2022-01-01 11:11:11';"""
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """ alter table ${tbName} drop column `datev3` """
     max_try_time = 1000
@@ -220,6 +227,7 @@ suite("test_agg_keys_schema_change_datev2") {
     sql """ insert into ${tbName} (`datek1`, `datek2`, `datev2`) values('2022-01-04', '2022-01-04 11:11:11', '2022-01-04 11:11:11');"""
     sql """ insert into ${tbName} (`datek1`, `datev1`, `datev2`) values('2022-01-05', '2022-01-05', '2022-01-05 11:11:11');"""
     sql """ insert into ${tbName} (`datek2`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-06', '2022-01-06 11:11:11');"""
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """ alter  table ${tbName} add column `datev3` datetimev2(3) DEFAULT '2022-01-01 11:11:11.111' """
     max_try_time = 1000
@@ -234,10 +242,13 @@ suite("test_agg_keys_schema_change_datev2") {
             }
         }
     }
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     do_compact(tbName)
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """delete from ${tbName} where `datev3` = '2022-01-01 11:11:11';"""
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """ insert into ${tbName} values('2022-01-02', '2022-01-02 11:11:11', '2022-01-02 11:11:11.222', '2022-01-02', '2022-01-02 11:11:11');"""
     sql """ insert into ${tbName} (`datek1`, `datek2`, `datev3`, `datev1`) values('2022-01-03', '2022-01-03 11:11:11', '2022-01-02 11:11:11.222', '2022-01-03');"""
@@ -245,6 +256,7 @@ suite("test_agg_keys_schema_change_datev2") {
     sql """ insert into ${tbName} (`datek1`, `datev3`, `datev1`, `datev2`) values('2022-01-05', '2022-01-02 11:11:11.222', '2022-01-05', '2022-01-05 11:11:11');"""
     sql """ insert into ${tbName} (`datek2`, `datev3`, `datev1`, `datev2`) values('2022-01-06 11:11:11', '2022-01-02 11:11:11.222', '2022-01-06', '2022-01-06 11:11:11');"""
     sql """delete from ${tbName} where `datev3` = '2022-01-01 11:11:11.111';"""
+    sql """sync"""
     qt_sql """select /*+ SET_VAR(enable_vectorized_engine=true) */ * from ${tbName} ORDER BY `datek1`;"""
     sql """ alter table ${tbName} drop column `datev3` """
     max_try_time = 1000

