You are viewing a plain text version of this content. The canonical link for it is here.
Posted to github@arrow.apache.org by GitBox <gi...@apache.org> on 2020/09/13 05:21:42 UTC

[GitHub] [arrow] emkornfield commented on a change in pull request #8177: ARROW-8494: [C++] Full support for mixed lists and structs

emkornfield commented on a change in pull request #8177:
URL: https://github.com/apache/arrow/pull/8177#discussion_r487483180



##########
File path: cpp/src/parquet/level_conversion.cc
##########
@@ -18,176 +18,169 @@
 
 #include <algorithm>
 #include <limits>
-#if defined(ARROW_HAVE_BMI2)
-#include <x86intrin.h>
-#endif
 
+#include "arrow/util/bit_run_reader.h"
 #include "arrow/util/bit_util.h"
+#include "arrow/util/cpu_info.h"
 #include "arrow/util/logging.h"
 #include "parquet/exception.h"
+#include "parquet/level_comparison.h"
+
+#define BMI_RUNTIME_VERSION standard
+#include "parquet/level_conversion_inc.h"
+#undef BMI_RUNTIME_VERSION
 
 namespace parquet {
 namespace internal {
 namespace {
-inline void CheckLevelRange(const int16_t* levels, int64_t num_levels,
-                            const int16_t max_expected_level) {
-  int16_t min_level = std::numeric_limits<int16_t>::max();
-  int16_t max_level = std::numeric_limits<int16_t>::min();
-  for (int x = 0; x < num_levels; x++) {
-    min_level = std::min(levels[x], min_level);
-    max_level = std::max(levels[x], max_level);
-  }
-  if (ARROW_PREDICT_FALSE(num_levels > 0 &&
-                          (min_level < 0 || max_level > max_expected_level))) {
-    throw ParquetException("definition level exceeds maximum");
-  }
-}
-
-#if !defined(ARROW_HAVE_AVX512)
 
-inline void DefinitionLevelsToBitmapScalar(
-    const int16_t* def_levels, int64_t num_def_levels, const int16_t max_definition_level,
-    const int16_t max_repetition_level, int64_t* values_read, int64_t* null_count,
-    uint8_t* valid_bits, int64_t valid_bits_offset) {
-  // We assume here that valid_bits is large enough to accommodate the
-  // additional definition levels and the ones that have already been written
-  ::arrow::internal::BitmapWriter valid_bits_writer(valid_bits, valid_bits_offset,
-                                                    num_def_levels);
-
-  // TODO(itaiin): As an interim solution we are splitting the code path here
-  // between repeated+flat column reads, and non-repeated+nested reads.
-  // Those paths need to be merged in the future
-  for (int i = 0; i < num_def_levels; ++i) {
-    if (def_levels[i] == max_definition_level) {
+using ::arrow::internal::CpuInfo;
+
+#if !defined(ARROW_HAVE_RUNTIME_BMI2)
+void DefinitionLevelsToBitmapScalar(const int16_t* def_levels, int64_t num_def_levels,
+                                    LevelInfo level_info, int64_t* values_read,
+                                    int64_t* null_count, uint8_t* valid_bits,
+                                    int64_t valid_bits_offset) {
+  ::arrow::internal::FirstTimeBitmapWriter valid_bits_writer(
+      valid_bits,
+      /*start_offset=*/valid_bits_offset,
+      /*length=*/num_def_levels);
+  for (int x = 0; x < num_def_levels; x++) {
+    if (def_levels[x] < level_info.repeated_ancestor_def_level) {
+      continue;
+    }
+    if (def_levels[x] >= level_info.def_level) {
       valid_bits_writer.Set();
-    } else if (max_repetition_level > 0) {
-      // repetition+flat case
-      if (def_levels[i] == (max_definition_level - 1)) {
-        valid_bits_writer.Clear();
-        *null_count += 1;
-      } else {
-        continue;
-      }
     } else {
-      // non-repeated+nested case
-      if (def_levels[i] < max_definition_level) {
-        valid_bits_writer.Clear();
-        *null_count += 1;
-      } else {
-        throw ParquetException("definition level exceeds maximum");
-      }
+      valid_bits_writer.Clear();
+      *null_count += 1;
     }
-
     valid_bits_writer.Next();
   }
   valid_bits_writer.Finish();
   *values_read = valid_bits_writer.position();
+  if (*null_count > 0 && level_info.null_slot_usage > 1) {
+    throw ParquetException(
+        "Null values with null_slot_usage > 1 not supported."
+        "(i.e. FixedSizeLists with null values are not supported");
+  }
 }
 #endif
 
-template <bool has_repeated_parent>
-int64_t DefinitionLevelsBatchToBitmap(const int16_t* def_levels, const int64_t batch_size,
-                                      const int16_t required_definition_level,
-                                      ::arrow::internal::FirstTimeBitmapWriter* writer) {
-  CheckLevelRange(def_levels, batch_size, required_definition_level);
-  uint64_t defined_bitmap =
-      internal::GreaterThanBitmap(def_levels, batch_size, required_definition_level - 1);
-
-  DCHECK_LE(batch_size, 64);
-  if (has_repeated_parent) {
-#if defined(ARROW_HAVE_BMI2)
-    // This is currently a specialized code path assuming only (nested) lists
-    // present through the leaf (i.e. no structs). Upper level code only calls
-    // this method when the leaf-values are nullable (otherwise no spacing is needed),
-    // Because only nested lists exists it is sufficient to know that the field
-    // was either null or included it (i.e. definition level > max_definitation_level
-    // -2) If there where structs mixed in, we need to know the def_level of the
-    // repeated parent so we can check for def_level > "def level of repeated parent".
-    uint64_t present_bitmap = internal::GreaterThanBitmap(def_levels, batch_size,
-                                                          required_definition_level - 2);
-    uint64_t selected_bits = _pext_u64(defined_bitmap, present_bitmap);
-    writer->AppendWord(selected_bits, ::arrow::BitUtil::PopCount(present_bitmap));
-    return ::arrow::BitUtil::PopCount(selected_bits);
-#else
-    assert(false && "must not execute this without BMI2");
-#endif
-  } else {
-    writer->AppendWord(defined_bitmap, batch_size);
-    return ::arrow::BitUtil::PopCount(defined_bitmap);
+template <typename LengthType>
+void DefRepLevelsToListInfo(const int16_t* def_levels, const int16_t* rep_levels,
+                            int64_t num_def_levels, LevelInfo level_info,
+                            ValidityBitmapInputOutput* output, LengthType* lengths) {
+  LengthType* orig_pos = lengths;
+  std::unique_ptr<::arrow::internal::FirstTimeBitmapWriter> valid_bits_writer;
+  if (output->valid_bits) {
+    valid_bits_writer.reset(new ::arrow::internal::FirstTimeBitmapWriter(
+        output->valid_bits, output->valid_bits_offset, num_def_levels));
   }
-}
+  for (int x = 0; x < num_def_levels; x++) {
+    // Skip items that belong to empty ancenstor lists and futher nested lists.
+    if (def_levels[x] < level_info.repeated_ancestor_def_level ||
+        rep_levels[x] > level_info.rep_level) {
+      continue;
+    }
+    if (rep_levels[x] == level_info.rep_level) {
+      // A continuation of an existing list.
+      if (lengths != nullptr) {
+        *lengths += 1;
+      }
+    } else {
+      // current_rep < list rep_level i.e. start of a list (ancenstor empty lists are
+      // filtered out above).
+      if (lengths != nullptr) {
+        ++lengths;
+        *lengths = (def_levels[x] >= level_info.def_level) ? 1 : 0;

Review comment:
       we should consider doing the sum of the previous element here.  Originally I did not because I thought at some point getting raw lengths would make it easier to handle chunked_arrays in reader.cc, but I think that case is esoteric enough that removing the need to touch this data twice will be better.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org