You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@impala.apache.org by mi...@apache.org on 2023/07/13 15:21:52 UTC

[impala] 02/02: IMPALA-12275: Read files written with DeflateCodec

This is an automated email from the ASF dual-hosted git repository.

michaelsmith pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit fbd8664b6b4d4b5d3df4290dc2309227803e245c
Author: Michael Smith <mi...@cloudera.com>
AuthorDate: Mon Jul 10 12:13:37 2023 -0700

    IMPALA-12275: Read files written with DeflateCodec
    
    DeflateCodec is an alias to DefaultCodec. Impala works with
    DefaultCodec. Fixes reading files written with DeflateCodec.
    
    DeflateCodec isn't an issue with text files because they don't include a
    codec header. Sequence files do, which we check on decompress.
    
    Moves TestTextInterop to an E2E test since it doesn't require any special
    startup options and refactors out test running to be format-agnostic.
    Updates text file test as IMPALA-8721 is fixed. Removes creating a table
    in Impala for Hive to read, as it didn't test anything new. Adds tests
    for sequence files; excludes reading zstd due to IMPALA-12276.
    
    Testing:
    - manual exhaustive run of updated tests
    
    Change-Id: Id5ec1d0345ae35597f6aade9d8b9eef2257efeba
    Reviewed-on: http://gerrit.cloudera.org:8080/20181
    Reviewed-by: Joe McDonnell <jo...@cloudera.com>
    Tested-by: Michael Smith <mi...@cloudera.com>
---
 be/src/util/codec.cc                               |   4 +
 be/src/util/codec.h                                |   1 +
 .../custom_cluster/test_hive_text_codec_interop.py | 115 --------------------
 tests/query_test/test_hive_codec_interop.py        | 121 +++++++++++++++++++++
 4 files changed, 126 insertions(+), 115 deletions(-)

diff --git a/be/src/util/codec.cc b/be/src/util/codec.cc
index b18f5adb3..4b9811c25 100644
--- a/be/src/util/codec.cc
+++ b/be/src/util/codec.cc
@@ -40,6 +40,9 @@ using namespace strings;
 
 const char* const Codec::DEFAULT_COMPRESSION =
     "org.apache.hadoop.io.compress.DefaultCodec";
+// An alias for DefaultCodec
+const char* const Codec::DEFLATE_COMPRESSION =
+    "org.apache.hadoop.io.compress.DeflateCodec";
 const char* const Codec::GZIP_COMPRESSION = "org.apache.hadoop.io.compress.GzipCodec";
 const char* const Codec::BZIP2_COMPRESSION = "org.apache.hadoop.io.compress.BZip2Codec";
 const char* const Codec::SNAPPY_COMPRESSION = "org.apache.hadoop.io.compress.SnappyCodec";
@@ -53,6 +56,7 @@ const char* const NO_LZO_MSG = "LZO codecs may not be created via the Codec inte
 
 const Codec::CodecMap Codec::CODEC_MAP = {{"", THdfsCompression::NONE},
     {DEFAULT_COMPRESSION, THdfsCompression::DEFAULT},
+    {DEFLATE_COMPRESSION, THdfsCompression::DEFAULT},
     {GZIP_COMPRESSION, THdfsCompression::GZIP},
     {BZIP2_COMPRESSION, THdfsCompression::BZIP2},
     {SNAPPY_COMPRESSION, THdfsCompression::SNAPPY_BLOCKED},
diff --git a/be/src/util/codec.h b/be/src/util/codec.h
index 5b40e2670..589746365 100644
--- a/be/src/util/codec.h
+++ b/be/src/util/codec.h
@@ -44,6 +44,7 @@ class Codec {
  public:
   /// These are the codec string representations used in Hadoop.
   static const char* const DEFAULT_COMPRESSION;
+  static const char* const DEFLATE_COMPRESSION;
   static const char* const GZIP_COMPRESSION;
   static const char* const BZIP2_COMPRESSION;
   static const char* const SNAPPY_COMPRESSION;
diff --git a/tests/custom_cluster/test_hive_text_codec_interop.py b/tests/custom_cluster/test_hive_text_codec_interop.py
deleted file mode 100644
index e5223be3a..000000000
--- a/tests/custom_cluster/test_hive_text_codec_interop.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# Tests for Hive-IMPALA text compression codec interoperability
-
-from __future__ import absolute_import, division, print_function
-import pytest
-
-from tests.common.custom_cluster_test_suite import CustomClusterTestSuite
-from tests.common.environ import HIVE_MAJOR_VERSION
-from tests.common.skip import SkipIfFS
-from tests.common.test_dimensions import create_exec_option_dimension
-from tests.common.test_result_verifier import verify_query_result_is_equal
-
-# compression codecs impala support reading in text file type
-TEXT_CODECS = ['snappy', 'gzip', 'zstd', 'bzip2', 'deflate', 'default']
-
-
-class TestTextInterop(CustomClusterTestSuite):
-
-  @classmethod
-  def get_workload(self):
-    return 'functional-query'
-
-  @classmethod
-  def setup_class(cls):
-    if cls.exploration_strategy() != 'exhaustive':
-      pytest.skip('runs only in exhaustive')
-    super(TestTextInterop, cls).setup_class()
-
-  @classmethod
-  def add_test_dimensions(cls):
-    super(CustomClusterTestSuite, cls).add_test_dimensions()
-    # Fix the exec_option vector to have a single value.
-    cls.ImpalaTestMatrix.add_dimension(create_exec_option_dimension(
-        cluster_sizes=[0], disable_codegen_options=[False], batch_sizes=[0],
-        sync_ddl=[1]))
-    cls.ImpalaTestMatrix.add_constraint(
-        lambda v: v.get_value('table_format').file_format == 'textfile')
-
-  @SkipIfFS.hive
-  @pytest.mark.execute_serially
-  def test_hive_impala_interop(self, unique_database, cluster_properties):
-    """Tests compressed text file written by Hive with different codecs
-    can be read from impala. And verify results."""
-    # Setup source table.
-    source_table = "{0}.{1}".format(unique_database, "t1_source")
-    # TODO: Once IMPALA-8721 is fixed add coverage for TimeStamp data type.
-    self.execute_query_expect_success(self.client,
-        "create table {0} stored as textfile as select id, bool_col, tinyint_col, "
-        "smallint_col, int_col, bigint_col, float_col, double_col, date_string_col,"
-        "string_col, year, month from functional_parquet.alltypes".format(source_table))
-    self.execute_query_expect_success(self.client,
-        "insert into {0}(id) values (7777), (8888), (9999), (11111), (22222), (33333)"
-        .format(source_table))
-
-    # For Hive 3+, workaround for HIVE-22371 (CTAS puts files in the wrong place) by
-    # explicitly creating an external table so that files are in the external warehouse
-    # directory. Use external.table.purge=true so that it is equivalent to a Hive 2
-    # managed table. Hive 2 stays the same.
-    external = ""
-    tblproperties = ""
-    if HIVE_MAJOR_VERSION >= 3:
-      external = "external"
-      tblproperties = "TBLPROPERTIES('external.table.purge'='TRUE')"
-    # Loop through the compression codecs and run interop tests.
-    for codec in TEXT_CODECS:
-      # Write data in Hive and read from Impala
-      # switch codec to format hive can accept
-      switcher = {
-          'snappy': 'org.apache.hadoop.io.compress.SnappyCodec',
-          'gzip': 'org.apache.hadoop.io.compress.GzipCodec',
-          'zstd': 'org.apache.hadoop.io.compress.ZStandardCodec',
-          'bzip2': 'org.apache.hadoop.io.compress.BZip2Codec',
-          'deflate': 'org.apache.hadoop.io.compress.DeflateCodec',
-          'default': 'org.apache.hadoop.io.compress.DefaultCodec'
-      }
-      hive_table = "{0}.{1}".format(unique_database, "t1_hive")
-      self.run_stmt_in_hive("drop table if exists {0}".format(hive_table))
-      self.run_stmt_in_hive("set hive.exec.compress.output=true;\
-          set mapreduce.output.fileoutputformat.compress.codec={0};\
-          create {1} table {2} stored as textfile {3} as select * from {4}"
-          .format(switcher.get(codec, 'Invalid codec'), external, hive_table,
-          tblproperties, source_table))
-
-      # Make sure hive CTAS table is not empty
-      assert self.run_stmt_in_hive("select count(*) from {0}".format(
-          hive_table)).split("\n")[1] != "0", "CTAS created Hive table is empty."
-
-      # Make sure Impala's metadata is in sync.
-      if cluster_properties.is_catalog_v2_cluster():
-        self.wait_for_table_to_appear(unique_database, hive_table, timeout_s=10)
-      else:
-        self.client.execute("invalidate metadata {0}".format(hive_table))
-
-      # Read Hive data in Impala and verify results.
-      base_result = self.execute_query_expect_success(self.client,
-          "select * from {0} order by id".format(source_table))
-      test_result = self.execute_query_expect_success(self.client,
-          "select * from {0} order by id".format(hive_table))
-      verify_query_result_is_equal(test_result.data, base_result.data)
diff --git a/tests/query_test/test_hive_codec_interop.py b/tests/query_test/test_hive_codec_interop.py
new file mode 100644
index 000000000..957c8ca1d
--- /dev/null
+++ b/tests/query_test/test_hive_codec_interop.py
@@ -0,0 +1,121 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# Tests for Hive-IMPALA compression codec interoperability
+
+from __future__ import absolute_import, division, print_function
+import pytest
+
+from tests.common.environ import HIVE_MAJOR_VERSION
+from tests.common.impala_test_suite import ImpalaTestSuite
+from tests.common.skip import SkipIfFS
+from tests.common.test_dimensions import create_exec_option_dimension
+from tests.common.test_result_verifier import verify_query_result_is_equal
+from tests.common.test_vector import ImpalaTestDimension, ImpalaTestMatrix
+
+# compression codecs impala support reading in text file type. Names use Hive convention.
+TEXT_CODECS = ["Snappy", "Gzip", "ZStandard", "BZip2", "Deflate", "Default"]
+
+# compression codecs impala support reading in sequence file type
+SEQUENCE_CODECS = list(TEXT_CODECS)
+# Omit zstd due to IMPALA-12276.
+SEQUENCE_CODECS.remove("ZStandard")
+
+
+class TestFileCodecInterop(ImpalaTestSuite):
+  @classmethod
+  def get_workload(self):
+    return 'functional-query'
+
+  @classmethod
+  def setup_class(cls):
+    if cls.exploration_strategy() != 'exhaustive':
+      pytest.skip('runs only in exhaustive')
+    super(TestFileCodecInterop, cls).setup_class()
+
+  @classmethod
+  def add_test_dimensions(cls):
+    cls.ImpalaTestMatrix = ImpalaTestMatrix()
+    # Fix the exec_option vector to have a single value.
+    cls.ImpalaTestMatrix.add_dimension(create_exec_option_dimension(sync_ddl=[1],
+        cluster_sizes=[0], disable_codegen_options=[False], batch_sizes=[0]))
+
+  def verify_codec(self, unique_database, cluster_properties, format, codec):
+    # For Hive 3+, workaround for HIVE-22371 (CTAS puts files in the wrong place) by
+    # explicitly creating an external table so that files are in the external warehouse
+    # directory. Use external.table.purge=true so that it is equivalent to a Hive 2
+    # managed table. Hive 2 stays the same.
+    external = ""
+    tblproperties = ""
+    if HIVE_MAJOR_VERSION >= 3:
+      external = "external"
+      tblproperties = "TBLPROPERTIES('external.table.purge'='TRUE')"
+
+    # Write data in Hive and read from Impala
+    source_table = "functional_parquet.alltypes"
+    hive_table = "{0}.{1}".format(unique_database, "t1_hive")
+    self.run_stmt_in_hive("drop table if exists {0}".format(hive_table))
+    self.run_stmt_in_hive("set hive.exec.compress.output=true;\
+        set mapreduce.output.fileoutputformat.compress.codec=\
+            org.apache.hadoop.io.compress.{0}Codec;\
+        create {1} table {2} stored as {3} {4} as select * from {5}"
+        .format(codec, external, hive_table, format, tblproperties, source_table))
+
+    # Make sure hive CTAS table is not empty
+    assert self.run_stmt_in_hive("select count(*) from {0}".format(
+        hive_table)).split("\n")[1] != "0", "CTAS created Hive table is empty."
+
+    # Make sure Impala's metadata is in sync.
+    if cluster_properties.is_catalog_v2_cluster():
+      self.wait_for_table_to_appear(unique_database, hive_table, timeout_s=10)
+    else:
+      self.client.execute("invalidate metadata {0}".format(hive_table))
+
+    # Read Hive data in Impala and verify results.
+    base_result = self.execute_query_expect_success(self.client,
+        "select * from {0} order by id".format(source_table))
+    test_result = self.execute_query_expect_success(self.client,
+        "select * from {0} order by id".format(hive_table))
+    verify_query_result_is_equal(test_result.data, base_result.data)
+
+
+class TestTextInterop(TestFileCodecInterop):
+  @classmethod
+  def add_test_dimensions(cls):
+    super(TestTextInterop, cls).add_test_dimensions()
+    cls.ImpalaTestMatrix.add_dimension(ImpalaTestDimension('codec', *TEXT_CODECS))
+
+  @SkipIfFS.hive
+  def test_hive_impala_interop(self, vector, unique_database, cluster_properties):
+    """Tests compressed text file written by Hive with different codecs
+    can be read from impala. And verify results."""
+    self.verify_codec(
+        unique_database, cluster_properties, 'textfile', vector.get_value('codec'))
+
+
+class TestSequenceInterop(TestFileCodecInterop):
+  @classmethod
+  def add_test_dimensions(cls):
+    super(TestSequenceInterop, cls).add_test_dimensions()
+    cls.ImpalaTestMatrix.add_dimension(ImpalaTestDimension('codec', *SEQUENCE_CODECS))
+
+  @SkipIfFS.hive
+  def test_hive_impala_interop(self, vector, unique_database, cluster_properties):
+    """Tests compressed sequence file written by Hive with different codecs
+    can be read from impala. And verify results."""
+    self.verify_codec(
+        unique_database, cluster_properties, 'sequencefile', vector.get_value('codec'))