Posted to github@arrow.apache.org by GitBox <gi...@apache.org> on 2022/01/04 02:25:51 UTC

[GitHub] [arrow] shanhuuang commented on a change in pull request #12015: ARROW-15184: [C++] Unit tests of reading delta-encoded Parquet files with and without nulls

shanhuuang commented on a change in pull request #12015:
URL: https://github.com/apache/arrow/pull/12015#discussion_r777792198



##########
File path: cpp/src/parquet/arrow/arrow_reader_writer_test.cc
##########
@@ -4262,6 +4262,64 @@ TEST_F(TestArrowReadDeltaEncoding, IncrementalDecodeDeltaByteArray) {
   ASSERT_EQ(nullptr, actual_batch);
 }
 
+TEST_F(TestArrowReadDeltaEncoding, RequiredColumn) {
+  std::shared_ptr<::arrow::Table> actual_table, expect_table;
+  ReadTableFromParquetFile("delta_encoding_required_column.parquet", &actual_table);
+
+  auto convert_options = ::arrow::csv::ConvertOptions::Defaults();
+  convert_options.column_types = {
+      {"c_customer_sk", ::arrow::int32()},
+      {"c_current_cdemo_sk", ::arrow::int32()},
+      {"c_current_hdemo_sk", ::arrow::int32()},
+      {"c_current_addr_sk", ::arrow::int32()},
+      {"c_first_shipto_date_sk", ::arrow::int32()},
+      {"c_first_sales_date_sk", ::arrow::int32()},
+      {"c_birth_day", ::arrow::int32()},
+      {"c_birth_month", ::arrow::int32()},
+      {"c_birth_year", ::arrow::int32()},
+      {"c_customer_id", ::arrow::utf8()},
+      {"c_salutation", ::arrow::utf8()},
+      {"c_first_name",  ::arrow::utf8()},
+      {"c_last_name", ::arrow::utf8()},
+      {"c_preferred_cust_flag", ::arrow::utf8()},
+      {"c_birth_country", ::arrow::utf8()},
+      {"c_login", ::arrow::utf8()},
+      {"c_email_address", ::arrow::utf8()},
+      {"c_last_review_date", ::arrow::utf8()}
+  };
+  ReadTableFromCSVFile("delta_encoding_required_column_expect.csv", convert_options, &expect_table);
+  ::arrow::AssertTablesEqual(*actual_table, *expect_table, false);
+}
+
+TEST_F(TestArrowReadDeltaEncoding, OptionalColumn) {
+  std::shared_ptr<::arrow::Table> actual_table, expect_table;
+  ReadTableFromParquetFile("delta_encoding_optional_column.parquet", &actual_table);
+
+  auto convert_options = ::arrow::csv::ConvertOptions::Defaults();
+  convert_options.column_types = {
+      {"c_customer_sk", ::arrow::int64()},
+      {"c_current_cdemo_sk", ::arrow::int64()},
+      {"c_current_hdemo_sk", ::arrow::int64()},
+      {"c_current_addr_sk", ::arrow::int64()},
+      {"c_first_shipto_date_sk", ::arrow::int64()},
+      {"c_first_sales_date_sk", ::arrow::int64()},
+      {"c_birth_day", ::arrow::int64()},
+      {"c_birth_month", ::arrow::int64()},
+      {"c_birth_year", ::arrow::int64()},

Review comment:
       No, I set the StructField type to IntegerType (it should have been LongType) by mistake in PySpark when generating the required-columns Parquet file...
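
       For reference, a minimal PySpark sketch of the schema fix being described (column names come from the test above; the file path, sample row, and the rest of the generation script are placeholders, not the actual script used to produce the test data):

from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, LongType, StringType

spark = SparkSession.builder.getOrCreate()

# The required-column file was generated with IntegerType by mistake;
# LongType was the intended type for the integer columns.
schema = StructType([
    StructField("c_customer_sk", LongType(), False),       # was IntegerType()
    StructField("c_current_cdemo_sk", LongType(), False),
    # ... remaining integer columns as LongType, string columns as StringType ...
    StructField("c_customer_id", StringType(), False),
])

rows = [(1, 1001, "AAAAAAAABAAAAAAA")]  # placeholder data
df = spark.createDataFrame(rows, schema)

# Producing the delta encodings exercised by the test also requires Parquet
# writer configuration (format version 2), which is not shown here.
df.write.mode("overwrite").parquet("/tmp/delta_encoding_required_column.parquet")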




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscribe@arrow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org