Posted to commits@doris.apache.org by yi...@apache.org on 2022/07/22 12:54:40 UTC

[doris] branch master updated: [regression]add the cases for csv/orc/parquet file format (#11082)

This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 49a17bea99 [regression]add the cases for csv/orc/parquet file format (#11082)
49a17bea99 is described below

commit 49a17bea99da5289a67977d0efb302e8643fe463
Author: carlvinhust2012 <hu...@126.com>
AuthorDate: Fri Jul 22 20:54:34 2022 +0800

    [regression]add the cases for csv/orc/parquet file format (#11082)
    
    Co-authored-by: hucheng01 <hu...@baidu.com>
---
 .../data/load/broker_load/simple_array.csv         |   5 +
 .../data/load/broker_load/simple_array.orc         | Bin 0 -> 1736 bytes
 .../data/load/broker_load/simple_array.parquet     | Bin 0 -> 7773 bytes
 .../data/load/broker_load/test_array_load.out      |  64 +++++++
 .../suites/load/broker_load/test_array_load.groovy | 196 +++++++++++++++++++--
 5 files changed, 246 insertions(+), 19 deletions(-)

diff --git a/regression-test/data/load/broker_load/simple_array.csv b/regression-test/data/load/broker_load/simple_array.csv
new file mode 100644
index 0000000000..0514c702de
--- /dev/null
+++ b/regression-test/data/load/broker_load/simple_array.csv
@@ -0,0 +1,5 @@
+1/[1,2,3,4,5]/[32767,32768,32769]/[65534,65535,65536]/["a","b","c","d","e"]/["hello","world"]/["1991-01-01"]/["1991-01-01 00:00:00"]/[0.33,0.67]/[3.1415926,0.878787878]/[1,1.2,1.3]
+2/[6,7,8,9,10]/[32767,32768,32769]/[65534,65535,65536]/["a","b","c","d","e"]/["hello","world"]/["1991-01-01"]/["1991-01-01 00:00:00"]/[0.33,0.67]/[3.1415926,0.878787878]/[1,1.2,1.3]
+3/[]/[32767,32768,32769]/[null,null,65536]/["a","b","c","d","e"]/["happy","birthday"]/["1991-01-01"]/["1991-01-01 00:00:00"]/[0.33,0.67]/[3.1415926,0.878787878]/[1,1.2,1.3]
+4/[null]/[32767,32768,32769]/[ null,null,65536]/["a","b","c","d","e"]/["hello","world"]/["1991-01-01"]/["1991-01-01 00:00:00"]/[0.33,0.67]/[3.1415926,0.878787878]/[1,1.2,1.3]
+5/[null,null]/[32767,32768,null]/[65534,null,65536]/["a","b","c","d","e"]/["hello","world"]/["1991-01-01"]/["1991-01-01 00:00:00"]/[0.33,0.67]/[3.1415926,0.878787878]/[1,1.2,1.3]
\ No newline at end of file
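For reference, each row of simple_array.csv is one key value followed by ten
array literals, all separated by "/". A minimal plain-Groovy sketch (not part
of the suite; the actual schema comes from create_test_table elsewhere in it)
showing how a row splits into those eleven fields:

    def row = '1/[1,2,3,4,5]/[32767,32768,32769]/[65534,65535,65536]/' +
              '["a","b","c","d","e"]/["hello","world"]/["1991-01-01"]/' +
              '["1991-01-01 00:00:00"]/[0.33,0.67]/[3.1415926,0.878787878]/[1,1.2,1.3]'
    def fields = row.split('/')
    assert fields.size() == 11   // the key column plus ten ARRAY columns
    assert fields[0] == '1'      // key column k1
    assert fields[1] == '[1,2,3,4,5]'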
diff --git a/regression-test/data/load/broker_load/simple_array.orc b/regression-test/data/load/broker_load/simple_array.orc
new file mode 100644
index 0000000000..4253b408d9
Binary files /dev/null and b/regression-test/data/load/broker_load/simple_array.orc differ
diff --git a/regression-test/data/load/broker_load/simple_array.parquet b/regression-test/data/load/broker_load/simple_array.parquet
new file mode 100644
index 0000000000..2aaa09f9a6
Binary files /dev/null and b/regression-test/data/load/broker_load/simple_array.parquet differ
diff --git a/regression-test/data/load/broker_load/test_array_load.out b/regression-test/data/load/broker_load/test_array_load.out
index e6189ea55d..da94539584 100644
--- a/regression-test/data/load/broker_load/test_array_load.out
+++ b/regression-test/data/load/broker_load/test_array_load.out
@@ -31,3 +31,67 @@
 5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
 100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
 
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
+-- !select --
+1	[1, 2, 3, 4, 5]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+2	[6, 7, 8, 9, 10]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+3	[]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['happy', 'birthday']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+4	[NULL]	[32767, 32768, 32769]	[NULL, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+5	[NULL, NULL]	[32767, 32768, NULL]	[65534, NULL, 65536]	['a', 'b', 'c', 'd', 'e']	['hello', 'world']	[1991-01-01]	[1991-01-01 00:00:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[1, 1.2, 1.3]
+100	[1, 2, 3]	[32767, 32768, 32769]	[65534, 65535, 65536]	['a', 'b', 'c']	['hello', 'world']	[2022-07-13]	[2022-07-13 12:30:00]	[0.33, 0.67]	[3.1415926, 0.878787878]	[4, 5.5, 6.67]
+
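For context: each "-- !select --" block above is the expected result of one
qt_select call in the suite below; the regression framework compares tagged
query output against these blocks in file order. Eight lines per block (the
tag, six data rows, a trailing blank) times eight new cases accounts for the
64 added lines. A minimal illustration of the pairing, assuming the
framework's usual tag-to-block matching:

    // each tagged query consumes the next "-- !select --" block of the .out file
    qt_select "select * from ${testTable} order by k1"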
diff --git a/regression-test/suites/load/broker_load/test_array_load.groovy b/regression-test/suites/load/broker_load/test_array_load.groovy
index d74dd5cbe7..51d6859e92 100644
--- a/regression-test/suites/load/broker_load/test_array_load.groovy
+++ b/regression-test/suites/load/broker_load/test_array_load.groovy
@@ -65,7 +65,7 @@ suite("test_array_load", "load") {
     }
 
     def load_array_data = {strip_flag, read_flag, format_flag, exprs, json_paths, 
-                            json_root, where_expr, fuzzy_flag, file_name ->
+                            json_root, where_expr, fuzzy_flag, column_sep, file_name ->
         // load the json data
         streamLoad {
             table "tbl_test_array_load"
@@ -79,6 +79,7 @@ suite("test_array_load", "load") {
             set 'json_root', json_root
             set 'where', where_expr
             set 'fuzzy_parse', fuzzy_flag
+            set 'column_separator', column_sep
             file file_name // import json file
             time 10000 // limit inflight 10s
 
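Note on the new column_sep parameter: the closure now forwards it as the
stream-load "column_separator" header, and JSON callers simply pass an empty
string. A usage sketch mirroring the calls added further down (argument order
follows the closure signature above):

    // CSV: the separator must match the "/" used in simple_array.csv
    load_array_data.call('true', '', 'csv',  '', '', '', '', '', '/', 'simple_array.csv')
    // JSON: no separator applies, so pass ''
    load_array_data.call('true', '', 'json', '', '', '', '', '', '',  'simple_array.json')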
@@ -101,7 +102,27 @@ suite("test_array_load", "load") {
         def result1= sql """
                         LOAD LABEL ${label} (
                             DATA INFILE("${hdfsFilePath}")
-                            INTO TABLE ${testTablex} 
+                            INTO TABLE ${testTablex}
+                            FORMAT as "${format}")
+                        with BROKER "${brokerName}" (
+                        "username"="${hdfsUser}",
+                        "password"="${hdfsPasswd}")
+                        PROPERTIES  (
+                        "timeout"="1200",
+                        "max_filter_ratio"="0.1");
+                        """
+        
+        assertTrue(result1.size() == 1)
+        assertTrue(result1[0].size() == 1)
+        assertTrue(result1[0][0] == 0, "Query OK, 0 rows affected")
+    }
+
+    def load_from_hdfs1 = {testTablex, label, hdfsFilePath, format, brokerName, hdfsUser, hdfsPasswd ->
+        def result1= sql """
+                        LOAD LABEL ${label} (
+                            DATA INFILE("${hdfsFilePath}")
+                            INTO TABLE ${testTablex}
+                            COLUMNS TERMINATED BY "/"
                             FORMAT as "${format}")
                         with BROKER "${brokerName}" (
                         "username"="${hdfsUser}",
@@ -116,13 +137,30 @@ suite("test_array_load", "load") {
         assertTrue(result1[0][0] == 0, "Query OK, 0 rows affected")
     }
     
+    def check_load_result = {checklabel, testTablex ->
+        max_try_milli_secs = 10000
+        while(max_try_milli_secs) {
+            result = sql "show load where label = '${checklabel}'"
+            if(result[0][2] == "FINISHED") {
+                qt_select "select * from ${testTablex} order by k1"
+                break
+            } else {
+                sleep(1000) // wait 1 second every time
+                max_try_milli_secs -= 1000
+                if(max_try_milli_secs <= 0) {
+                    assertEquals(1, 2)
+                }
+            }
+        }
+    }
+
     // case1: import array data in json format and enable vectorized engine
     try {
         sql "DROP TABLE IF EXISTS ${testTable}"
         
         create_test_table.call(testTable, true)
 
-        load_array_data.call('true', '', 'json', '', '', '', '', '', 'simple_array.json')
+        load_array_data.call('true', '', 'json', '', '', '', '', '', '', 'simple_array.json')
         
         // select the table and check whether the data is correct
         qt_select "select * from ${testTable} order by k1"
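The check_load_result helper above replaces the fixed sleep(5000) the HDFS
cases used before: broker LOAD is asynchronous, so the helper polls SHOW LOAD
once per second for up to ten seconds and only runs the verification query
after the job reaches FINISHED (the assertEquals(1, 2) forces a failure on
timeout). A sketch of the row it inspects, assuming the usual SHOW LOAD
layout where column index 2 is the job State:

    def result = sql "show load where label = '${checklabel}'"
    def state  = result[0][2]   // assumed State column: PENDING/ETL/LOADING/FINISHED/CANCELLED
    // only once state == "FINISHED" is it safe to qt_select and diff the .out file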
@@ -137,7 +175,37 @@ suite("test_array_load", "load") {
         
         create_test_table.call(testTable, false)
 
-        load_array_data.call('true', '', 'json', '', '', '', '', '', 'simple_array.json')
+        load_array_data.call('true', '', 'json', '', '', '', '', '', '', 'simple_array.json')
+        
+        // select the table and check whether the data is correct
+        qt_select "select * from ${testTable} order by k1"
+
+    } finally {
+        try_sql("DROP TABLE IF EXISTS ${testTable}")
+    }
+    
+    // case3: import array data in csv format and enable vectorized engine
+    try {
+        sql "DROP TABLE IF EXISTS ${testTable}"
+        
+        create_test_table.call(testTable, true)
+
+        load_array_data.call('true', '', 'csv', '', '', '', '', '', '/', 'simple_array.csv')
+        
+        // select the table and check whether the data is correct
+        qt_select "select * from ${testTable} order by k1"
+
+    } finally {
+        try_sql("DROP TABLE IF EXISTS ${testTable}")
+    }
+
+    // case4: import array data in csv format and disable vectorized engine
+    try {
+        sql "DROP TABLE IF EXISTS ${testTable}"
+        
+        create_test_table.call(testTable, false)
+
+        load_array_data.call('true', '', 'csv', '', '', '', '', '', '/', 'simple_array.csv')
         
         // select the table and check whether the data is correct
         qt_select "select * from ${testTable} order by k1"
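The two CSV separator mechanisms now covered sit side by side here: stream
load passes it as the "column_separator" header (cases 3 and 4 above), while
broker load declares it in the statement via the new load_from_hdfs1 helper
(cases 7 and 8 below); both must agree with the "/" used in simple_array.csv.
Cases 9-12 reuse the same helper for orc and parquet, where the clause is
presumably ignored by the binary readers:

    // stream load (load_array_data):   set 'column_separator', column_sep
    // broker load (load_from_hdfs1):   COLUMNS TERMINATED BY "/"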
@@ -152,44 +220,134 @@ suite("test_array_load", "load") {
         brokerName =getBrokerName()
         hdfsUser = getHdfsUser()
         hdfsPasswd = getHdfsPasswd()
-        def hdfs_file_path = uploadToHdfs "broker_load/simple_object_array.json"
-        def format = "json" 
-
-        // case3: import array data by hdfs and enable vectorized engine
+        def hdfs_json_file_path = uploadToHdfs "broker_load/simple_object_array.json"
+        def hdfs_csv_file_path = uploadToHdfs "broker_load/simple_array.csv"
+        def hdfs_orc_file_path = uploadToHdfs "broker_load/simple_array.orc"
+        def hdfs_parquet_file_path = uploadToHdfs "broker_load/simple_array.parquet"
+ 
+        // case5: import array data by hdfs and enable vectorized engine
         try {
             sql "DROP TABLE IF EXISTS ${testTable}"
             
             create_test_table.call(testTable, true)
 
             def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
-            load_from_hdfs.call(testTable, test_load_label, hdfs_file_path, format,
+            load_from_hdfs.call(testTable, test_load_label, hdfs_json_file_path, "json",
+                                brokerName, hdfsUser, hdfsPasswd)
+            
+            check_load_result.call(test_load_label, testTable)
+
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+
+        // case6: import array data by hdfs and disable vectorized engine
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+            
+            create_test_table.call(testTable, false)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs.call(testTable, test_load_label, hdfs_json_file_path, "json",
                                 brokerName, hdfsUser, hdfsPasswd)
             
-            // wait to load finished
-            sleep(5000)
+            check_load_result.call(test_load_label, testTable)
+
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+
+        // case7: import array data by hdfs in csv format and enable vectorized
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+
+            create_test_table.call(testTable, true)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs1.call(testTable, test_load_label, hdfs_csv_file_path, "csv",
+                                brokerName, hdfsUser, hdfsPasswd)
+                        
+            check_load_result.call(test_load_label, testTable)
+
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+
+        // case8: import array data by hdfs in csv format and disable vectorized
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+
+            create_test_table.call(testTable, false)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs1.call(testTable, test_load_label, hdfs_csv_file_path, "csv",
+                                brokerName, hdfsUser, hdfsPasswd)
             
-            // select the table and check whether the data is correct
-            qt_select "select * from ${testTable} order by k1"
+            check_load_result.call(test_load_label, testTable)
 
         } finally {
             try_sql("DROP TABLE IF EXISTS ${testTable}")
         }
 
-        // case4: import array data by hdfs and disable vectorized engine
+        // case9: import array data by hdfs in orc format and enable vectorized
         try {
             sql "DROP TABLE IF EXISTS ${testTable}"
+
+            create_test_table.call(testTable, true)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs1.call(testTable, test_load_label, hdfs_orc_file_path, "orc",
+                                brokerName, hdfsUser, hdfsPasswd)
             
+            check_load_result.call(test_load_label, testTable)
+
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+
+        // case10: import array data by hdfs in orc format and disable vectorized
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+
             create_test_table.call(testTable, false)
 
             def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
-            load_from_hdfs.call(testTable, test_load_label, hdfs_file_path, format,
+            load_from_hdfs1.call(testTable, test_load_label, hdfs_orc_file_path, "orc",
                                 brokerName, hdfsUser, hdfsPasswd)
             
-            // wait to load finished
-            sleep(5000)
+            check_load_result.call(test_load_label, testTable)
+
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+
+        // case11: import array data by hdfs in parquet format and enable vectorized
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+
+            create_test_table.call(testTable, true)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs1.call(testTable, test_load_label, hdfs_parquet_file_path, "parquet",
+                                brokerName, hdfsUser, hdfsPasswd)
+            
+            check_load_result.call(test_load_label, testTable)
+
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+
+        // case12: import array data by hdfs in parquet format and disable vectorized
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+
+            create_test_table.call(testTable, false)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs1.call(testTable, test_load_label, hdfs_parquet_file_path, "parquet",
+                                brokerName, hdfsUser, hdfsPasswd)
             
-            // select the table and check whether the data is correct
-            qt_select "select * from ${testTable} order by k1"
+            check_load_result.call(test_load_label, testTable)
 
         } finally {
             try_sql("DROP TABLE IF EXISTS ${testTable}")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@doris.apache.org
For additional commands, e-mail: commits-help@doris.apache.org