Posted to commits@arrow.apache.org by jo...@apache.org on 2021/04/18 14:40:10 UTC

[arrow-rs] branch master updated (9a4ef46 -> a889eba)

This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git.


    from 9a4ef46  ARROW-12432: [Rust] [DataFusion] Add metrics to SortExec
     new ff4212c  Removed Ruby.
     new d131362  Removed R language.
     new 4d14b30  Removed Python.
     new db557f2  Removed matlab.
     new 31f7e93  Removed julia.
     new 70125b1  Removed js.
     new f864f41  Removed Java.
     new 8d9f27f  Removed go.
     new 1030fc2  Removed docs.
     new a8a6520  Removed csharp.
     new 92a3eec  Removed cpp.
     new 32a88cf  Removed c_glib.
     new 13af12d  Changed references to DF and Ballista in Cargo.
     new a889eba  Removed DataFusion and Ballista.

The 14 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
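
To inspect these revisions locally, a minimal sketch using standard git
commands and the repository URL and abbreviated commit SHAs quoted above
(assuming the short SHAs still resolve uniquely in the repository):

    git clone https://gitbox.apache.org/repos/asf/arrow-rs.git
    cd arrow-rs

    # List the 14 commits that are new on master, i.e. everything
    # reachable from the new head but not from the old one.
    git log --oneline 9a4ef46..a889eba

    # Show the per-file change summary for the whole range; this should
    # correspond to the "Summary of changes" listing below.
    git diff --stat 9a4ef46 a889eba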


Summary of changes:
 .dockerignore                                      |     3 -
 .github/workflows/dev_pr/labeler.yml               |     6 -
 c_glib/.gitignore                                  |    70 -
 c_glib/Brewfile                                    |    22 -
 c_glib/Gemfile                                     |    23 -
 c_glib/README.md                                   |   315 -
 c_glib/arrow-cuda-glib/arrow-cuda-glib.h           |    24 -
 c_glib/arrow-cuda-glib/arrow-cuda-glib.hpp         |    24 -
 c_glib/arrow-cuda-glib/cuda.cpp                    |   944 -
 c_glib/arrow-cuda-glib/cuda.h                      |   183 -
 c_glib/arrow-cuda-glib/cuda.hpp                    |    54 -
 c_glib/arrow-cuda-glib/meson.build                 |    81 -
 c_glib/arrow-dataset-glib/arrow-dataset-glib.h     |    26 -
 c_glib/arrow-dataset-glib/arrow-dataset-glib.hpp   |    26 -
 c_glib/arrow-dataset-glib/file-format.cpp          |   265 -
 c_glib/arrow-dataset-glib/file-format.h            |    92 -
 c_glib/arrow-dataset-glib/file-format.hpp          |    30 -
 c_glib/arrow-dataset-glib/fragment.cpp             |   184 -
 c_glib/arrow-dataset-glib/fragment.h               |    58 -
 c_glib/arrow-dataset-glib/fragment.hpp             |    33 -
 c_glib/arrow-dataset-glib/meson.build              |    82 -
 c_glib/arrow-dataset-glib/scanner.cpp              |   522 -
 c_glib/arrow-dataset-glib/scanner.h                |    88 -
 c_glib/arrow-dataset-glib/scanner.hpp              |    35 -
 c_glib/arrow-glib/array-builder.cpp                |  6178 ----
 c_glib/arrow-glib/array-builder.h                  |  1387 -
 c_glib/arrow-glib/array-builder.hpp                |    27 -
 c_glib/arrow-glib/array.h                          |    23 -
 c_glib/arrow-glib/array.hpp                        |    23 -
 c_glib/arrow-glib/arrow-glib.h                     |    56 -
 c_glib/arrow-glib/arrow-glib.hpp                   |    51 -
 c_glib/arrow-glib/basic-array.cpp                  |  3011 --
 c_glib/arrow-glib/basic-array.h                    |   661 -
 c_glib/arrow-glib/basic-array.hpp                  |    47 -
 c_glib/arrow-glib/basic-data-type.cpp              |  1984 --
 c_glib/arrow-glib/basic-data-type.h                |   586 -
 c_glib/arrow-glib/basic-data-type.hpp              |    67 -
 c_glib/arrow-glib/buffer.cpp                       |   695 -
 c_glib/arrow-glib/buffer.h                         |   106 -
 c_glib/arrow-glib/buffer.hpp                       |    43 -
 c_glib/arrow-glib/chunked-array.cpp                |   370 -
 c_glib/arrow-glib/chunked-array.h                  |    66 -
 c_glib/arrow-glib/chunked-array.hpp                |    27 -
 c_glib/arrow-glib/codec.cpp                        |   263 -
 c_glib/arrow-glib/codec.h                          |    74 -
 c_glib/arrow-glib/codec.hpp                        |    34 -
 c_glib/arrow-glib/composite-array.cpp              |  1706 -
 c_glib/arrow-glib/composite-array.h                |   240 -
 c_glib/arrow-glib/composite-data-type.cpp          |   720 -
 c_glib/arrow-glib/composite-data-type.h            |   199 -
 c_glib/arrow-glib/compute.cpp                      |  3119 --
 c_glib/arrow-glib/compute.h                        |   530 -
 c_glib/arrow-glib/compute.hpp                      |    70 -
 c_glib/arrow-glib/data-type.h                      |    23 -
 c_glib/arrow-glib/data-type.hpp                    |    23 -
 c_glib/arrow-glib/datum.cpp                        |   683 -
 c_glib/arrow-glib/datum.h                          |   121 -
 c_glib/arrow-glib/datum.hpp                        |    42 -
 c_glib/arrow-glib/decimal.cpp                      |  1115 -
 c_glib/arrow-glib/decimal.h                        |   154 -
 c_glib/arrow-glib/decimal.hpp                      |    36 -
 c_glib/arrow-glib/enums.c.template                 |    52 -
 c_glib/arrow-glib/enums.h.template                 |    41 -
 c_glib/arrow-glib/error.cpp                        |   159 -
 c_glib/arrow-glib/error.h                          |    69 -
 c_glib/arrow-glib/error.hpp                        |    73 -
 c_glib/arrow-glib/field.cpp                        |   386 -
 c_glib/arrow-glib/field.h                          |    75 -
 c_glib/arrow-glib/field.hpp                        |    28 -
 c_glib/arrow-glib/file-mode.cpp                    |    59 -
 c_glib/arrow-glib/file-mode.h                      |    40 -
 c_glib/arrow-glib/file-mode.hpp                    |    27 -
 c_glib/arrow-glib/file-system.cpp                  |  1450 -
 c_glib/arrow-glib/file-system.h                    |   283 -
 c_glib/arrow-glib/file-system.hpp                  |    48 -
 c_glib/arrow-glib/file.cpp                         |   120 -
 c_glib/arrow-glib/file.h                           |    43 -
 c_glib/arrow-glib/file.hpp                         |    38 -
 c_glib/arrow-glib/gobject-type.h                   |   116 -
 c_glib/arrow-glib/input-stream.cpp                 |  1238 -
 c_glib/arrow-glib/input-stream.h                   |   227 -
 c_glib/arrow-glib/input-stream.hpp                 |    50 -
 c_glib/arrow-glib/internal-hash-table.hpp          |    41 -
 c_glib/arrow-glib/internal-index.hpp               |    37 -
 c_glib/arrow-glib/ipc-options.cpp                  |   529 -
 c_glib/arrow-glib/ipc-options.h                    |    66 -
 c_glib/arrow-glib/ipc-options.hpp                  |    32 -
 c_glib/arrow-glib/local-file-system.cpp            |   211 -
 c_glib/arrow-glib/local-file-system.h              |    60 -
 c_glib/arrow-glib/local-file-system.hpp            |    32 -
 c_glib/arrow-glib/meson.build                      |   274 -
 c_glib/arrow-glib/metadata-version.cpp             |    59 -
 c_glib/arrow-glib/metadata-version.h               |    41 -
 c_glib/arrow-glib/metadata-version.hpp             |    27 -
 c_glib/arrow-glib/orc-file-reader.cpp              |   445 -
 c_glib/arrow-glib/orc-file-reader.h                |    76 -
 c_glib/arrow-glib/orc-file-reader.hpp              |    31 -
 c_glib/arrow-glib/output-stream.cpp                |   763 -
 c_glib/arrow-glib/output-stream.h                  |   227 -
 c_glib/arrow-glib/output-stream.hpp                |    40 -
 c_glib/arrow-glib/readable.cpp                     |   114 -
 c_glib/arrow-glib/readable.h                       |    43 -
 c_glib/arrow-glib/readable.hpp                     |    39 -
 c_glib/arrow-glib/reader.cpp                       |  2187 --
 c_glib/arrow-glib/reader.h                         |   363 -
 c_glib/arrow-glib/reader.hpp                       |    56 -
 c_glib/arrow-glib/record-batch.cpp                 |   646 -
 c_glib/arrow-glib/record-batch.h                   |   112 -
 c_glib/arrow-glib/record-batch.hpp                 |    33 -
 c_glib/arrow-glib/schema.cpp                       |   440 -
 c_glib/arrow-glib/schema.h                         |    80 -
 c_glib/arrow-glib/schema.hpp                       |    27 -
 c_glib/arrow-glib/table-builder.cpp                |   337 -
 c_glib/arrow-glib/table-builder.h                  |    69 -
 c_glib/arrow-glib/table-builder.hpp                |    27 -
 c_glib/arrow-glib/table.cpp                        |   806 -
 c_glib/arrow-glib/table.h                          |   138 -
 c_glib/arrow-glib/table.hpp                        |    31 -
 c_glib/arrow-glib/tensor.cpp                       |   464 -
 c_glib/arrow-glib/tensor.h                         |    64 -
 c_glib/arrow-glib/tensor.hpp                       |    29 -
 c_glib/arrow-glib/type.cpp                         |   142 -
 c_glib/arrow-glib/type.h                           |   129 -
 c_glib/arrow-glib/type.hpp                         |    28 -
 c_glib/arrow-glib/version.h.in                     |   411 -
 c_glib/arrow-glib/writable-file.cpp                |    75 -
 c_glib/arrow-glib/writable-file.h                  |    39 -
 c_glib/arrow-glib/writable-file.hpp                |    39 -
 c_glib/arrow-glib/writable.cpp                     |    92 -
 c_glib/arrow-glib/writable.h                       |    40 -
 c_glib/arrow-glib/writable.hpp                     |    39 -
 c_glib/arrow-glib/writer.cpp                       |   334 -
 c_glib/arrow-glib/writer.h                         |   192 -
 c_glib/arrow-glib/writer.hpp                       |    33 -
 .../arrow-dataset-glib/arrow-dataset-glib-docs.xml |    66 -
 c_glib/doc/arrow-dataset-glib/entities.xml.in      |    24 -
 c_glib/doc/arrow-dataset-glib/meson.build          |    83 -
 c_glib/doc/arrow-glib/arrow-glib-docs.xml          |   259 -
 c_glib/doc/arrow-glib/entities.xml.in              |    24 -
 c_glib/doc/arrow-glib/meson.build                  |    93 -
 c_glib/doc/gandiva-glib/entities.xml.in            |    24 -
 c_glib/doc/gandiva-glib/gandiva-glib-docs.xml      |   128 -
 c_glib/doc/gandiva-glib/meson.build                |    83 -
 c_glib/doc/parquet-glib/entities.xml.in            |    24 -
 c_glib/doc/parquet-glib/meson.build                |    83 -
 c_glib/doc/parquet-glib/parquet-glib-docs.xml      |    93 -
 c_glib/doc/plasma-glib/entities.xml.in             |    24 -
 c_glib/doc/plasma-glib/meson.build                 |    86 -
 c_glib/doc/plasma-glib/plasma-glib-docs.xml        |    68 -
 c_glib/example/README.md                           |    48 -
 c_glib/example/build.c                             |    77 -
 c_glib/example/extension-type.c                    |   381 -
 c_glib/example/lua/README.md                       |    50 -
 c_glib/example/lua/meson.build                     |    28 -
 c_glib/example/lua/read-batch.lua                  |    44 -
 c_glib/example/lua/read-stream.lua                 |    51 -
 c_glib/example/lua/write-batch.lua                 |    74 -
 c_glib/example/lua/write-stream.lua                |    74 -
 c_glib/example/meson.build                         |    36 -
 c_glib/example/read-batch.c                        |   145 -
 c_glib/example/read-stream.c                       |   144 -
 c_glib/gandiva-glib/enums.c.template               |    52 -
 c_glib/gandiva-glib/enums.h.template               |    41 -
 c_glib/gandiva-glib/expression.cpp                 |   294 -
 c_glib/gandiva-glib/expression.h                   |    63 -
 c_glib/gandiva-glib/expression.hpp                 |    39 -
 c_glib/gandiva-glib/filter.cpp                     |   257 -
 c_glib/gandiva-glib/filter.h                       |    49 -
 c_glib/gandiva-glib/filter.hpp                     |    33 -
 c_glib/gandiva-glib/function-registry.cpp          |   116 -
 c_glib/gandiva-glib/function-registry.h            |    44 -
 c_glib/gandiva-glib/function-signature.cpp         |   243 -
 c_glib/gandiva-glib/function-signature.h           |    48 -
 c_glib/gandiva-glib/function-signature.hpp         |    27 -
 c_glib/gandiva-glib/gandiva-glib.h                 |    31 -
 c_glib/gandiva-glib/gandiva-glib.hpp               |    28 -
 c_glib/gandiva-glib/meson.build                    |   120 -
 c_glib/gandiva-glib/native-function.cpp            |   282 -
 c_glib/gandiva-glib/native-function.h              |    65 -
 c_glib/gandiva-glib/native-function.hpp            |    36 -
 c_glib/gandiva-glib/node.cpp                       |  1688 -
 c_glib/gandiva-glib/node.h                         |   395 -
 c_glib/gandiva-glib/node.hpp                       |    49 -
 c_glib/gandiva-glib/projector.cpp                  |   391 -
 c_glib/gandiva-glib/projector.h                    |    75 -
 c_glib/gandiva-glib/projector.hpp                  |    39 -
 c_glib/gandiva-glib/selection-vector.cpp           |   323 -
 c_glib/gandiva-glib/selection-vector.h             |   128 -
 c_glib/gandiva-glib/selection-vector.hpp           |    32 -
 c_glib/gandiva-glib/version.h.in                   |   218 -
 c_glib/meson.build                                 |   181 -
 c_glib/meson_options.txt                           |    38 -
 c_glib/parquet-glib/arrow-file-reader.cpp          |   386 -
 c_glib/parquet-glib/arrow-file-reader.h            |    74 -
 c_glib/parquet-glib/arrow-file-reader.hpp          |    29 -
 c_glib/parquet-glib/arrow-file-writer.cpp          |   579 -
 c_glib/parquet-glib/arrow-file-writer.h            |   125 -
 c_glib/parquet-glib/arrow-file-writer.hpp          |    33 -
 c_glib/parquet-glib/meson.build                    |    93 -
 c_glib/parquet-glib/parquet-glib.h                 |    25 -
 c_glib/parquet-glib/parquet-glib.hpp               |    25 -
 c_glib/parquet-glib/version.h.in                   |   218 -
 c_glib/plasma-glib/client.cpp                      |   608 -
 c_glib/plasma-glib/client.h                        |    98 -
 c_glib/plasma-glib/client.hpp                      |    29 -
 c_glib/plasma-glib/meson.build                     |   107 -
 c_glib/plasma-glib/object.cpp                      |   590 -
 c_glib/plasma-glib/object.h                        |    89 -
 c_glib/plasma-glib/object.hpp                      |    47 -
 c_glib/plasma-glib/plasma-glib.h                   |    23 -
 c_glib/plasma-glib/plasma-glib.hpp                 |    25 -
 c_glib/test/dataset/test-file-format.rb            |    34 -
 c_glib/test/dataset/test-in-memory-scan-task.rb    |    59 -
 c_glib/test/dataset/test-scan-options.rb           |    47 -
 c_glib/test/file-system-tests.rb                   |   383 -
 c_glib/test/fixture/TestOrcFile.test1.orc          |   Bin 1711 -> 0 bytes
 c_glib/test/gandiva/test-binary-literal-node.rb    |    47 -
 c_glib/test/gandiva/test-boolean-literal-node.rb   |    32 -
 c_glib/test/gandiva/test-boolean-node.rb           |    38 -
 c_glib/test/gandiva/test-condition.rb              |    35 -
 c_glib/test/gandiva/test-double-literal-node.rb    |    32 -
 c_glib/test/gandiva/test-expression.rb             |    46 -
 c_glib/test/gandiva/test-field-node.rb             |    37 -
 c_glib/test/gandiva/test-filter.rb                 |    51 -
 c_glib/test/gandiva/test-float-literal-node.rb     |    32 -
 c_glib/test/gandiva/test-function-node.rb          |    43 -
 c_glib/test/gandiva/test-function-registry.rb      |    45 -
 c_glib/test/gandiva/test-function-signature.rb     |   101 -
 c_glib/test/gandiva/test-if-node.rb                |    49 -
 c_glib/test/gandiva/test-int16-literal-node.rb     |    32 -
 c_glib/test/gandiva/test-int32-literal-node.rb     |    32 -
 c_glib/test/gandiva/test-int64-literal-node.rb     |    32 -
 c_glib/test/gandiva/test-int8-literal-node.rb      |    32 -
 c_glib/test/gandiva/test-native-function.rb        |   132 -
 c_glib/test/gandiva/test-null-literal-node.rb      |    38 -
 c_glib/test/gandiva/test-projector.rb              |    63 -
 c_glib/test/gandiva/test-selectable-projector.rb   |    74 -
 c_glib/test/gandiva/test-selection-vector.rb       |    42 -
 c_glib/test/gandiva/test-string-literal-node.rb    |    32 -
 c_glib/test/gandiva/test-uint16-literal-node.rb    |    32 -
 c_glib/test/gandiva/test-uint32-literal-node.rb    |    32 -
 c_glib/test/gandiva/test-uint64-literal-node.rb    |    32 -
 c_glib/test/gandiva/test-uint8-literal-node.rb     |    32 -
 c_glib/test/helper/buildable.rb                    |   234 -
 c_glib/test/helper/data-type.rb                    |    63 -
 c_glib/test/helper/fixture.rb                      |    24 -
 c_glib/test/helper/omittable.rb                    |    36 -
 c_glib/test/helper/plasma-store.rb                 |    57 -
 c_glib/test/parquet/test-arrow-file-reader.rb      |    65 -
 c_glib/test/parquet/test-arrow-file-writer.rb      |    46 -
 c_glib/test/parquet/test-writer-properties.rb      |   103 -
 c_glib/test/plasma/test-plasma-client-options.rb   |    31 -
 c_glib/test/plasma/test-plasma-client.rb           |    94 -
 c_glib/test/plasma/test-plasma-created-object.rb   |    59 -
 c_glib/test/plasma/test-plasma-referred-object.rb  |    54 -
 c_glib/test/run-test.rb                            |    80 -
 c_glib/test/run-test.sh                            |    59 -
 c_glib/test/test-array-builder.rb                  |  1944 --
 c_glib/test/test-array-datum.rb                    |    58 -
 c_glib/test/test-array-sort-options.rb             |    31 -
 c_glib/test/test-array.rb                          |   188 -
 c_glib/test/test-binary-array.rb                   |    61 -
 c_glib/test/test-binary-data-type.rb               |    33 -
 c_glib/test/test-boolean-array.rb                  |    81 -
 c_glib/test/test-boolean-data-type.rb              |    33 -
 c_glib/test/test-buffer-input-stream.rb            |   111 -
 c_glib/test/test-buffer-output-stream.rb           |    60 -
 c_glib/test/test-buffer.rb                         |   102 -
 c_glib/test/test-cast.rb                           |   145 -
 c_glib/test/test-chunked-array-datum.rb            |    58 -
 c_glib/test/test-chunked-array.rb                  |   141 -
 c_glib/test/test-codec.rb                          |    33 -
 c_glib/test/test-compare.rb                        |    69 -
 c_glib/test/test-compressed-input-stream.rb        |    45 -
 c_glib/test/test-compressed-output-stream.rb       |    43 -
 c_glib/test/test-count-values.rb                   |    51 -
 c_glib/test/test-count.rb                          |    46 -
 c_glib/test/test-csv-reader.rb                     |   241 -
 c_glib/test/test-cuda.rb                           |   159 -
 c_glib/test/test-date32-array.rb                   |    65 -
 c_glib/test/test-date32-data-type.rb               |    33 -
 c_glib/test/test-date64-array.rb                   |    65 -
 c_glib/test/test-date64-data-type.rb               |    33 -
 c_glib/test/test-decimal128-array.rb               |    37 -
 c_glib/test/test-decimal128-data-type.rb           |    43 -
 c_glib/test/test-decimal128.rb                     |   233 -
 c_glib/test/test-decimal256-array.rb               |    37 -
 c_glib/test/test-decimal256-data-type.rb           |    43 -
 c_glib/test/test-decimal256.rb                     |   220 -
 c_glib/test/test-dense-union-array.rb              |    88 -
 c_glib/test/test-dense-union-data-type.rb          |    64 -
 c_glib/test/test-dictionary-array-builder.rb       |   395 -
 c_glib/test/test-dictionary-array.rb               |    78 -
 c_glib/test/test-dictionary-data-type.rb           |    60 -
 c_glib/test/test-dictionary-encode.rb              |    62 -
 c_glib/test/test-double-array.rb                   |    60 -
 c_glib/test/test-double-data-type.rb               |    33 -
 c_glib/test/test-extension-data-type.rb            |   105 -
 c_glib/test/test-feather-file-reader.rb            |    71 -
 c_glib/test/test-field.rb                          |   116 -
 c_glib/test/test-file-info.rb                      |   170 -
 c_glib/test/test-file-output-stream.rb             |    38 -
 c_glib/test/test-file-selector.rb                  |    82 -
 c_glib/test/test-file-writer.rb                    |    85 -
 c_glib/test/test-filter.rb                         |   247 -
 c_glib/test/test-fixed-size-binary-array.rb        |    59 -
 c_glib/test/test-fixed-size-binary-data-type.rb    |    43 -
 c_glib/test/test-float-array.rb                    |    67 -
 c_glib/test/test-float-data-type.rb                |    33 -
 c_glib/test/test-function.rb                       |    64 -
 c_glib/test/test-gio-input-stream.rb               |    72 -
 c_glib/test/test-gio-output-stream.rb              |    79 -
 c_glib/test/test-int-array-builder.rb              |    59 -
 c_glib/test/test-int16-array.rb                    |    60 -
 c_glib/test/test-int16-data-type.rb                |    33 -
 c_glib/test/test-int32-array.rb                    |    58 -
 c_glib/test/test-int32-data-type.rb                |    33 -
 c_glib/test/test-int64-array.rb                    |    58 -
 c_glib/test/test-int64-data-type.rb                |    33 -
 c_glib/test/test-int8-array.rb                     |    65 -
 c_glib/test/test-int8-data-type.rb                 |    40 -
 c_glib/test/test-is-in.rb                          |    96 -
 c_glib/test/test-json-reader.rb                    |    90 -
 c_glib/test/test-large-binary-array.rb             |    61 -
 c_glib/test/test-large-binary-data-type.rb         |    33 -
 c_glib/test/test-large-list-array.rb               |    98 -
 c_glib/test/test-large-list-data-type.rb           |    48 -
 c_glib/test/test-large-string-array.rb             |    46 -
 c_glib/test/test-large-string-data-type.rb         |    33 -
 c_glib/test/test-list-array.rb                     |    97 -
 c_glib/test/test-list-data-type.rb                 |    48 -
 c_glib/test/test-local-file-system.rb              |    57 -
 c_glib/test/test-map-array-builder.rb              |   143 -
 c_glib/test/test-map-array.rb                      |    39 -
 c_glib/test/test-map-data-type.rb                  |    44 -
 c_glib/test/test-memory-mapped-input-stream.rb     |    84 -
 c_glib/test/test-mock-file-system.rb               |    30 -
 c_glib/test/test-mutable-buffer.rb                 |    74 -
 c_glib/test/test-null-array.rb                     |    33 -
 c_glib/test/test-null-data-type.rb                 |    33 -
 c_glib/test/test-numeric-array.rb                  |    26 -
 c_glib/test/test-orc-file-reader.rb                |   238 -
 c_glib/test/test-read-options.rb                   |    61 -
 c_glib/test/test-record-batch-builder.rb           |    86 -
 c_glib/test/test-record-batch-datum.rb             |    58 -
 c_glib/test/test-record-batch-iterator.rb          |    51 -
 c_glib/test/test-record-batch.rb                   |   185 -
 c_glib/test/test-resizable-buffer.rb               |    32 -
 c_glib/test/test-schema.rb                         |   203 -
 c_glib/test/test-slow-file-system.rb               |    43 -
 c_glib/test/test-sort-indices.rb                   |    69 -
 c_glib/test/test-sort-options.rb                   |    59 -
 c_glib/test/test-sparse-union-array.rb             |    86 -
 c_glib/test/test-sparse-union-data-type.rb         |    64 -
 c_glib/test/test-stream-writer.rb                  |    57 -
 c_glib/test/test-string-array.rb                   |    46 -
 c_glib/test/test-string-data-type.rb               |    33 -
 c_glib/test/test-struct-array.rb                   |    88 -
 c_glib/test/test-struct-data-type.rb               |   115 -
 c_glib/test/test-table-batch-reader.rb             |    42 -
 c_glib/test/test-table-datum.rb                    |    58 -
 c_glib/test/test-table.rb                          |   273 -
 c_glib/test/test-take.rb                           |   214 -
 c_glib/test/test-tensor.rb                         |   125 -
 c_glib/test/test-time-data-type.rb                 |    24 -
 c_glib/test/test-time32-array.rb                   |    69 -
 c_glib/test/test-time32-data-type.rb               |    56 -
 c_glib/test/test-time64-array.rb                   |    57 -
 c_glib/test/test-time64-data-type.rb               |    56 -
 c_glib/test/test-timestamp-array.rb                |    57 -
 c_glib/test/test-timestamp-data-type.rb            |    84 -
 c_glib/test/test-uint-array-builder.rb             |    59 -
 c_glib/test/test-uint16-array.rb                   |    60 -
 c_glib/test/test-uint16-data-type.rb               |    33 -
 c_glib/test/test-uint32-array.rb                   |    60 -
 c_glib/test/test-uint32-data-type.rb               |    33 -
 c_glib/test/test-uint64-array.rb                   |    60 -
 c_glib/test/test-uint64-data-type.rb               |    33 -
 c_glib/test/test-uint8-array.rb                    |    58 -
 c_glib/test/test-uint8-data-type.rb                |    40 -
 c_glib/test/test-unique.rb                         |    31 -
 c_glib/test/test-write-options.rb                  |   102 -
 cpp/.gitignore                                     |    43 -
 cpp/Brewfile                                       |    44 -
 cpp/CHANGELOG_PARQUET.md                           |   501 -
 cpp/CMakeLists.txt                                 |   925 -
 cpp/CMakeSettings.json                             |    21 -
 cpp/README.md                                      |    34 -
 cpp/apidoc/.gitignore                              |     1 -
 cpp/apidoc/Doxyfile                                |  2551 --
 cpp/apidoc/HDFS.md                                 |    83 -
 cpp/apidoc/footer.html                             |    31 -
 cpp/apidoc/tutorials/plasma.md                     |   450 -
 cpp/apidoc/tutorials/tensor_to_py.md               |   127 -
 cpp/build-support/asan_symbolize.py                |   368 -
 cpp/build-support/build-lz4-lib.sh                 |    25 -
 cpp/build-support/build-zstd-lib.sh                |    25 -
 cpp/build-support/cpplint.py                       |  6477 ----
 cpp/build-support/fuzzing/generate_corpuses.sh     |    52 -
 cpp/build-support/fuzzing/pack_corpus.py           |    54 -
 cpp/build-support/get-upstream-commit.sh           |    25 -
 cpp/build-support/iwyu/iwyu-filter.awk             |    96 -
 cpp/build-support/iwyu/iwyu.sh                     |    90 -
 cpp/build-support/iwyu/iwyu_tool.py                |   280 -
 cpp/build-support/iwyu/mappings/arrow-misc.imp     |    61 -
 .../iwyu/mappings/boost-all-private.imp            |  4166 ---
 cpp/build-support/iwyu/mappings/boost-all.imp      |  5679 ----
 cpp/build-support/iwyu/mappings/boost-extra.imp    |    23 -
 cpp/build-support/iwyu/mappings/gflags.imp         |    20 -
 cpp/build-support/iwyu/mappings/glog.imp           |    27 -
 cpp/build-support/iwyu/mappings/gmock.imp          |    23 -
 cpp/build-support/iwyu/mappings/gtest.imp          |    26 -
 cpp/build-support/lint_cpp_cli.py                  |   128 -
 cpp/build-support/lint_exclusions.txt              |    12 -
 cpp/build-support/lintutils.py                     |   109 -
 cpp/build-support/lsan-suppressions.txt            |    21 -
 cpp/build-support/run-infer.sh                     |    48 -
 cpp/build-support/run-test.sh                      |   237 -
 cpp/build-support/run_clang_format.py              |   137 -
 cpp/build-support/run_clang_tidy.py                |   124 -
 cpp/build-support/run_cpplint.py                   |   132 -
 cpp/build-support/sanitizer-disallowed-entries.txt |    25 -
 cpp/build-support/stacktrace_addr2line.pl          |    92 -
 cpp/build-support/trim-boost.sh                    |    72 -
 cpp/build-support/tsan-suppressions.txt            |    19 -
 cpp/build-support/ubsan-suppressions.txt           |    16 -
 cpp/build-support/update-flatbuffers.sh            |    41 -
 cpp/build-support/update-thrift.sh                 |    23 -
 cpp/build-support/vendor-flatbuffers.sh            |    31 -
 cpp/cmake_modules/BuildUtils.cmake                 |   947 -
 cpp/cmake_modules/DefineOptions.cmake              |   579 -
 cpp/cmake_modules/Find-c-aresAlt.cmake             |    71 -
 cpp/cmake_modules/FindArrow.cmake                  |   438 -
 cpp/cmake_modules/FindArrowCUDA.cmake              |    91 -
 cpp/cmake_modules/FindArrowDataset.cmake           |    91 -
 cpp/cmake_modules/FindArrowFlight.cmake            |    92 -
 cpp/cmake_modules/FindArrowFlightTesting.cmake     |   103 -
 cpp/cmake_modules/FindArrowPython.cmake            |    90 -
 cpp/cmake_modules/FindArrowPythonFlight.cmake      |    99 -
 cpp/cmake_modules/FindArrowTesting.cmake           |    91 -
 cpp/cmake_modules/FindBoostAlt.cmake               |    63 -
 cpp/cmake_modules/FindBrotli.cmake                 |   133 -
 cpp/cmake_modules/FindClangTools.cmake             |   103 -
 cpp/cmake_modules/FindGLOG.cmake                   |    54 -
 cpp/cmake_modules/FindGandiva.cmake                |    97 -
 cpp/cmake_modules/FindInferTools.cmake             |    47 -
 cpp/cmake_modules/FindLLVMAlt.cmake                |    81 -
 cpp/cmake_modules/FindLz4.cmake                    |    85 -
 cpp/cmake_modules/FindNumPy.cmake                  |    96 -
 cpp/cmake_modules/FindORC.cmake                    |    53 -
 cpp/cmake_modules/FindOpenSSLAlt.cmake             |    54 -
 cpp/cmake_modules/FindParquet.cmake                |   130 -
 cpp/cmake_modules/FindPlasma.cmake                 |   106 -
 cpp/cmake_modules/FindPython3Alt.cmake             |    99 -
 cpp/cmake_modules/FindPythonLibsNew.cmake          |   267 -
 cpp/cmake_modules/FindRapidJSONAlt.cmake           |    74 -
 cpp/cmake_modules/FindSnappy.cmake                 |    63 -
 cpp/cmake_modules/FindThrift.cmake                 |   142 -
 cpp/cmake_modules/FindgRPCAlt.cmake                |    82 -
 cpp/cmake_modules/FindgflagsAlt.cmake              |    59 -
 cpp/cmake_modules/Findjemalloc.cmake               |    94 -
 cpp/cmake_modules/Findre2Alt.cmake                 |    85 -
 cpp/cmake_modules/Findutf8proc.cmake               |    70 -
 cpp/cmake_modules/Findzstd.cmake                   |    89 -
 cpp/cmake_modules/SetupCxxFlags.cmake              |   634 -
 cpp/cmake_modules/ThirdpartyToolchain.cmake        |  2959 --
 cpp/cmake_modules/UseCython.cmake                  |   184 -
 cpp/cmake_modules/Usevcpkg.cmake                   |   217 -
 cpp/cmake_modules/san-config.cmake                 |   133 -
 cpp/examples/arrow/CMakeLists.txt                  |    36 -
 .../arrow/dataset_documentation_example.cc         |   355 -
 cpp/examples/arrow/dataset_parquet_scan_example.cc |   188 -
 cpp/examples/arrow/row_wise_conversion_example.cc  |   190 -
 cpp/examples/minimal_build/.gitignore              |    18 -
 cpp/examples/minimal_build/CMakeLists.txt          |    40 -
 cpp/examples/minimal_build/README.md               |    88 -
 cpp/examples/minimal_build/build_arrow.sh          |    35 -
 cpp/examples/minimal_build/build_example.sh        |    27 -
 cpp/examples/minimal_build/docker-compose.yml      |    51 -
 cpp/examples/minimal_build/example.cc              |    73 -
 cpp/examples/minimal_build/minimal.dockerfile      |    26 -
 cpp/examples/minimal_build/run.sh                  |    48 -
 cpp/examples/minimal_build/run_static.bat          |    88 -
 cpp/examples/minimal_build/run_static.sh           |    90 -
 .../minimal_build/system_dependency.dockerfile     |    43 -
 cpp/examples/minimal_build/test.csv                |     3 -
 cpp/examples/parquet/CMakeLists.txt                |    78 -
 .../low_level_api/encryption_reader_writer.cc      |   454 -
 .../encryption_reader_writer_all_crypto_options.cc |   664 -
 .../parquet/low_level_api/reader_writer.cc         |   413 -
 cpp/examples/parquet/low_level_api/reader_writer.h |    71 -
 .../parquet/low_level_api/reader_writer2.cc        |   434 -
 cpp/examples/parquet/parquet_arrow/CMakeLists.txt  |    42 -
 cpp/examples/parquet/parquet_arrow/README.md       |    20 -
 .../parquet/parquet_arrow/reader_writer.cc         |   144 -
 .../parquet_stream_api/stream_reader_writer.cc     |   326 -
 cpp/src/arrow/ArrowConfig.cmake.in                 |    92 -
 cpp/src/arrow/ArrowTestingConfig.cmake.in          |    36 -
 cpp/src/arrow/CMakeLists.txt                       |   708 -
 cpp/src/arrow/adapters/orc/CMakeLists.txt          |    61 -
 cpp/src/arrow/adapters/orc/adapter.cc              |   478 -
 cpp/src/arrow/adapters/orc/adapter.h               |   149 -
 cpp/src/arrow/adapters/orc/adapter_test.cc         |   160 -
 cpp/src/arrow/adapters/orc/adapter_util.cc         |   430 -
 cpp/src/arrow/adapters/orc/adapter_util.h          |    41 -
 cpp/src/arrow/adapters/orc/arrow-orc.pc.in         |    24 -
 cpp/src/arrow/adapters/tensorflow/CMakeLists.txt   |    21 -
 .../adapters/tensorflow/arrow-tensorflow.pc.in     |    24 -
 cpp/src/arrow/adapters/tensorflow/convert.h        |   128 -
 cpp/src/arrow/api.h                                |    44 -
 cpp/src/arrow/array.h                              |    32 -
 cpp/src/arrow/array/CMakeLists.txt                 |    26 -
 cpp/src/arrow/array/README.md                      |    20 -
 cpp/src/arrow/array/array_base.cc                  |   308 -
 cpp/src/arrow/array/array_base.h                   |   258 -
 cpp/src/arrow/array/array_binary.cc                |   108 -
 cpp/src/arrow/array/array_binary.h                 |   248 -
 cpp/src/arrow/array/array_binary_test.cc           |   835 -
 cpp/src/arrow/array/array_decimal.cc               |    63 -
 cpp/src/arrow/array/array_decimal.h                |    66 -
 cpp/src/arrow/array/array_dict.cc                  |   442 -
 cpp/src/arrow/array/array_dict.h                   |   180 -
 cpp/src/arrow/array/array_dict_test.cc             |  1678 -
 cpp/src/arrow/array/array_list_test.cc             |  1134 -
 cpp/src/arrow/array/array_nested.cc                |   757 -
 cpp/src/arrow/array/array_nested.h                 |   523 -
 cpp/src/arrow/array/array_primitive.cc             |    99 -
 cpp/src/arrow/array/array_primitive.h              |   135 -
 cpp/src/arrow/array/array_struct_test.cc           |   610 -
 cpp/src/arrow/array/array_test.cc                  |  2948 --
 cpp/src/arrow/array/array_union_test.cc            |   582 -
 cpp/src/arrow/array/array_view_test.cc             |   441 -
 cpp/src/arrow/array/builder_adaptive.cc            |   380 -
 cpp/src/arrow/array/builder_adaptive.h             |   203 -
 cpp/src/arrow/array/builder_base.cc                |   136 -
 cpp/src/arrow/array/builder_base.h                 |   270 -
 cpp/src/arrow/array/builder_binary.cc              |   199 -
 cpp/src/arrow/array/builder_binary.h               |   632 -
 cpp/src/arrow/array/builder_decimal.cc             |   105 -
 cpp/src/arrow/array/builder_decimal.h              |    92 -
 cpp/src/arrow/array/builder_dict.cc                |   204 -
 cpp/src/arrow/array/builder_dict.h                 |   571 -
 cpp/src/arrow/array/builder_nested.cc              |   294 -
 cpp/src/arrow/array/builder_nested.h               |   482 -
 cpp/src/arrow/array/builder_primitive.cc           |   138 -
 cpp/src/arrow/array/builder_primitive.h            |   478 -
 cpp/src/arrow/array/builder_time.h                 |    43 -
 cpp/src/arrow/array/builder_union.cc               |   121 -
 cpp/src/arrow/array/builder_union.h                |   235 -
 cpp/src/arrow/array/concatenate.cc                 |   490 -
 cpp/src/arrow/array/concatenate.h                  |    42 -
 cpp/src/arrow/array/concatenate_test.cc            |   386 -
 cpp/src/arrow/array/data.cc                        |   333 -
 cpp/src/arrow/array/data.h                         |   260 -
 cpp/src/arrow/array/dict_internal.h                |   193 -
 cpp/src/arrow/array/diff.cc                        |   784 -
 cpp/src/arrow/array/diff.h                         |    76 -
 cpp/src/arrow/array/diff_test.cc                   |   688 -
 cpp/src/arrow/array/util.cc                        |   745 -
 cpp/src/arrow/array/util.h                         |    78 -
 cpp/src/arrow/array/validate.cc                    |   657 -
 cpp/src/arrow/array/validate.h                     |    55 -
 cpp/src/arrow/arrow-config.cmake                   |    26 -
 cpp/src/arrow/arrow-testing.pc.in                  |    27 -
 cpp/src/arrow/arrow.pc.in                          |    29 -
 cpp/src/arrow/buffer.cc                            |   313 -
 cpp/src/arrow/buffer.h                             |   508 -
 cpp/src/arrow/buffer_builder.h                     |   419 -
 cpp/src/arrow/buffer_test.cc                       |   852 -
 cpp/src/arrow/builder.cc                           |   222 -
 cpp/src/arrow/builder.h                            |    32 -
 cpp/src/arrow/builder_benchmark.cc                 |   453 -
 cpp/src/arrow/c/CMakeLists.txt                     |    22 -
 cpp/src/arrow/c/abi.h                              |   103 -
 cpp/src/arrow/c/bridge.cc                          |  1712 -
 cpp/src/arrow/c/bridge.h                           |   197 -
 cpp/src/arrow/c/bridge_benchmark.cc                |   159 -
 cpp/src/arrow/c/bridge_test.cc                     |  2946 --
 cpp/src/arrow/c/helpers.h                          |   117 -
 cpp/src/arrow/c/util_internal.h                    |    85 -
 cpp/src/arrow/chunked_array.cc                     |   267 -
 cpp/src/arrow/chunked_array.h                      |   248 -
 cpp/src/arrow/chunked_array_test.cc                |   244 -
 cpp/src/arrow/compare.cc                           |  1304 -
 cpp/src/arrow/compare.h                            |   133 -
 cpp/src/arrow/compare_benchmark.cc                 |   164 -
 cpp/src/arrow/compute/CMakeLists.txt               |    70 -
 cpp/src/arrow/compute/README.md                    |    58 -
 cpp/src/arrow/compute/api.h                        |    35 -
 cpp/src/arrow/compute/api_aggregate.cc             |    77 -
 cpp/src/arrow/compute/api_aggregate.h              |   407 -
 cpp/src/arrow/compute/api_scalar.cc                |   152 -
 cpp/src/arrow/compute/api_scalar.h                 |   434 -
 cpp/src/arrow/compute/api_vector.cc                |   162 -
 cpp/src/arrow/compute/api_vector.h                 |   379 -
 cpp/src/arrow/compute/arrow-compute.pc.in          |    21 -
 cpp/src/arrow/compute/cast.cc                      |   245 -
 cpp/src/arrow/compute/cast.h                       |   171 -
 cpp/src/arrow/compute/cast_internal.h              |    42 -
 cpp/src/arrow/compute/exec.cc                      |   997 -
 cpp/src/arrow/compute/exec.h                       |   242 -
 cpp/src/arrow/compute/exec_internal.h              |   142 -
 cpp/src/arrow/compute/exec_test.cc                 |   858 -
 cpp/src/arrow/compute/function.cc                  |   295 -
 cpp/src/arrow/compute/function.h                   |   354 -
 cpp/src/arrow/compute/function_benchmark.cc        |   184 -
 cpp/src/arrow/compute/function_test.cc             |   234 -
 cpp/src/arrow/compute/kernel.cc                    |   473 -
 cpp/src/arrow/compute/kernel.h                     |   753 -
 cpp/src/arrow/compute/kernel_test.cc               |   503 -
 cpp/src/arrow/compute/kernels/CMakeLists.txt       |    67 -
 cpp/src/arrow/compute/kernels/aggregate_basic.cc   |   347 -
 .../arrow/compute/kernels/aggregate_basic_avx2.cc  |    77 -
 .../compute/kernels/aggregate_basic_avx512.cc      |    78 -
 .../compute/kernels/aggregate_basic_internal.h     |   397 -
 .../arrow/compute/kernels/aggregate_benchmark.cc   |   752 -
 cpp/src/arrow/compute/kernels/aggregate_internal.h |   165 -
 cpp/src/arrow/compute/kernels/aggregate_mode.cc    |   369 -
 .../arrow/compute/kernels/aggregate_quantile.cc    |   462 -
 cpp/src/arrow/compute/kernels/aggregate_tdigest.cc |   153 -
 cpp/src/arrow/compute/kernels/aggregate_test.cc    |  1743 -
 cpp/src/arrow/compute/kernels/aggregate_var_std.cc |   284 -
 cpp/src/arrow/compute/kernels/codegen_internal.cc  |   327 -
 cpp/src/arrow/compute/kernels/codegen_internal.h   |  1258 -
 cpp/src/arrow/compute/kernels/common.h             |    54 -
 cpp/src/arrow/compute/kernels/hash_aggregate.cc    |  1066 -
 .../arrow/compute/kernels/hash_aggregate_test.cc   |   703 -
 cpp/src/arrow/compute/kernels/scalar_arithmetic.cc |   501 -
 .../compute/kernels/scalar_arithmetic_benchmark.cc |   159 -
 .../compute/kernels/scalar_arithmetic_test.cc      |   821 -
 cpp/src/arrow/compute/kernels/scalar_boolean.cc    |   503 -
 .../compute/kernels/scalar_boolean_benchmark.cc    |    59 -
 .../arrow/compute/kernels/scalar_boolean_test.cc   |   141 -
 .../arrow/compute/kernels/scalar_cast_benchmark.cc |   117 -
 .../arrow/compute/kernels/scalar_cast_boolean.cc   |    70 -
 .../arrow/compute/kernels/scalar_cast_internal.cc  |   279 -
 .../arrow/compute/kernels/scalar_cast_internal.h   |    88 -
 .../arrow/compute/kernels/scalar_cast_nested.cc    |   126 -
 .../arrow/compute/kernels/scalar_cast_numeric.cc   |   724 -
 .../arrow/compute/kernels/scalar_cast_string.cc    |   249 -
 .../arrow/compute/kernels/scalar_cast_temporal.cc  |   456 -
 cpp/src/arrow/compute/kernels/scalar_cast_test.cc  |  1879 --
 cpp/src/arrow/compute/kernels/scalar_compare.cc    |   220 -
 .../compute/kernels/scalar_compare_benchmark.cc    |    80 -
 .../arrow/compute/kernels/scalar_compare_test.cc   |   656 -
 cpp/src/arrow/compute/kernels/scalar_fill_null.cc  |   242 -
 .../arrow/compute/kernels/scalar_fill_null_test.cc |   168 -
 cpp/src/arrow/compute/kernels/scalar_nested.cc     |   171 -
 .../arrow/compute/kernels/scalar_nested_test.cc    |   172 -
 cpp/src/arrow/compute/kernels/scalar_set_lookup.cc |   494 -
 .../compute/kernels/scalar_set_lookup_benchmark.cc |   143 -
 .../compute/kernels/scalar_set_lookup_test.cc      |   793 -
 cpp/src/arrow/compute/kernels/scalar_string.cc     |  2162 --
 .../compute/kernels/scalar_string_benchmark.cc     |   123 -
 .../arrow/compute/kernels/scalar_string_test.cc    |   600 -
 cpp/src/arrow/compute/kernels/scalar_validity.cc   |   180 -
 .../arrow/compute/kernels/scalar_validity_test.cc  |   128 -
 cpp/src/arrow/compute/kernels/test_util.cc         |   200 -
 cpp/src/arrow/compute/kernels/test_util.h          |   152 -
 cpp/src/arrow/compute/kernels/util_internal.cc     |    85 -
 cpp/src/arrow/compute/kernels/util_internal.h      |   157 -
 cpp/src/arrow/compute/kernels/vector_hash.cc       |   775 -
 .../arrow/compute/kernels/vector_hash_benchmark.cc |   250 -
 cpp/src/arrow/compute/kernels/vector_hash_test.cc  |   750 -
 cpp/src/arrow/compute/kernels/vector_nested.cc     |   104 -
 .../arrow/compute/kernels/vector_nested_test.cc    |    55 -
 .../compute/kernels/vector_partition_benchmark.cc  |    59 -
 cpp/src/arrow/compute/kernels/vector_selection.cc  |  2181 --
 .../compute/kernels/vector_selection_benchmark.cc  |   354 -
 .../arrow/compute/kernels/vector_selection_test.cc |  1721 -
 cpp/src/arrow/compute/kernels/vector_sort.cc       |  1769 -
 .../arrow/compute/kernels/vector_sort_benchmark.cc |   285 -
 cpp/src/arrow/compute/kernels/vector_sort_test.cc  |  1263 -
 cpp/src/arrow/compute/registry.cc                  |   154 -
 cpp/src/arrow/compute/registry.h                   |    83 -
 cpp/src/arrow/compute/registry_internal.h          |    54 -
 cpp/src/arrow/compute/registry_test.cc             |    87 -
 cpp/src/arrow/compute/type_fwd.h                   |    43 -
 cpp/src/arrow/compute/util_internal.h              |    32 -
 cpp/src/arrow/config.cc                            |    78 -
 cpp/src/arrow/config.h                             |    72 -
 cpp/src/arrow/csv/CMakeLists.txt                   |    39 -
 cpp/src/arrow/csv/api.h                            |    26 -
 cpp/src/arrow/csv/arrow-csv.pc.in                  |    24 -
 cpp/src/arrow/csv/chunker.cc                       |   266 -
 cpp/src/arrow/csv/chunker.h                        |    36 -
 cpp/src/arrow/csv/chunker_test.cc                  |   265 -
 cpp/src/arrow/csv/column_builder.cc                |   367 -
 cpp/src/arrow/csv/column_builder.h                 |    78 -
 cpp/src/arrow/csv/column_builder_test.cc           |   550 -
 cpp/src/arrow/csv/column_decoder.cc                |   367 -
 cpp/src/arrow/csv/column_decoder.h                 |    81 -
 cpp/src/arrow/csv/column_decoder_test.cc           |   427 -
 cpp/src/arrow/csv/converter.cc                     |   691 -
 cpp/src/arrow/csv/converter.h                      |    82 -
 cpp/src/arrow/csv/converter_benchmark.cc           |   152 -
 cpp/src/arrow/csv/converter_test.cc                |   636 -
 cpp/src/arrow/csv/inference_internal.h             |   150 -
 cpp/src/arrow/csv/options.cc                       |    40 -
 cpp/src/arrow/csv/options.h                        |   156 -
 cpp/src/arrow/csv/parser.cc                        |   549 -
 cpp/src/arrow/csv/parser.h                         |   192 -
 cpp/src/arrow/csv/parser_benchmark.cc              |   205 -
 cpp/src/arrow/csv/parser_test.cc                   |   627 -
 cpp/src/arrow/csv/reader.cc                        |   999 -
 cpp/src/arrow/csv/reader.h                         |    80 -
 cpp/src/arrow/csv/reader_test.cc                   |   212 -
 cpp/src/arrow/csv/test_common.cc                   |   119 -
 cpp/src/arrow/csv/test_common.h                    |    53 -
 cpp/src/arrow/csv/type_fwd.h                       |    27 -
 cpp/src/arrow/csv/writer.cc                        |   437 -
 cpp/src/arrow/csv/writer.h                         |    47 -
 cpp/src/arrow/csv/writer_test.cc                   |   128 -
 cpp/src/arrow/dataset/ArrowDatasetConfig.cmake.in  |    37 -
 cpp/src/arrow/dataset/CMakeLists.txt               |   134 -
 cpp/src/arrow/dataset/README.md                    |    32 -
 cpp/src/arrow/dataset/api.h                        |    29 -
 cpp/src/arrow/dataset/arrow-dataset.pc.in          |    25 -
 cpp/src/arrow/dataset/dataset.cc                   |   228 -
 cpp/src/arrow/dataset/dataset.h                    |   241 -
 cpp/src/arrow/dataset/dataset_internal.h           |   211 -
 cpp/src/arrow/dataset/dataset_test.cc              |   751 -
 cpp/src/arrow/dataset/discovery.cc                 |   275 -
 cpp/src/arrow/dataset/discovery.h                  |   264 -
 cpp/src/arrow/dataset/discovery_test.cc            |   479 -
 cpp/src/arrow/dataset/expression.cc                |  1283 -
 cpp/src/arrow/dataset/expression.h                 |   250 -
 cpp/src/arrow/dataset/expression_benchmark.cc      |    91 -
 cpp/src/arrow/dataset/expression_internal.h        |   342 -
 cpp/src/arrow/dataset/expression_test.cc           |  1282 -
 cpp/src/arrow/dataset/file_base.cc                 |   499 -
 cpp/src/arrow/dataset/file_base.h                  |   346 -
 cpp/src/arrow/dataset/file_benchmark.cc            |    87 -
 cpp/src/arrow/dataset/file_csv.cc                  |   204 -
 cpp/src/arrow/dataset/file_csv.h                   |    85 -
 cpp/src/arrow/dataset/file_csv_test.cc             |   301 -
 cpp/src/arrow/dataset/file_ipc.cc                  |   224 -
 cpp/src/arrow/dataset/file_ipc.h                   |   111 -
 cpp/src/arrow/dataset/file_ipc_test.cc             |   388 -
 cpp/src/arrow/dataset/file_parquet.cc              |   783 -
 cpp/src/arrow/dataset/file_parquet.h               |   364 -
 cpp/src/arrow/dataset/file_parquet_test.cc         |   685 -
 cpp/src/arrow/dataset/file_test.cc                 |   541 -
 cpp/src/arrow/dataset/forest_internal.h            |   124 -
 cpp/src/arrow/dataset/partition.cc                 |   617 -
 cpp/src/arrow/dataset/partition.h                  |   321 -
 cpp/src/arrow/dataset/partition_test.cc            |   728 -
 cpp/src/arrow/dataset/pch.h                        |    27 -
 cpp/src/arrow/dataset/projector.cc                 |    63 -
 cpp/src/arrow/dataset/projector.h                  |    32 -
 cpp/src/arrow/dataset/scanner.cc                   |   567 -
 cpp/src/arrow/dataset/scanner.h                    |   437 -
 cpp/src/arrow/dataset/scanner_internal.h           |   200 -
 cpp/src/arrow/dataset/scanner_test.cc              |   457 -
 cpp/src/arrow/dataset/test_util.h                  |   839 -
 cpp/src/arrow/dataset/type_fwd.h                   |    95 -
 cpp/src/arrow/dataset/visibility.h                 |    50 -
 cpp/src/arrow/datum.cc                             |   284 -
 cpp/src/arrow/datum.h                              |   281 -
 cpp/src/arrow/datum_test.cc                        |   172 -
 cpp/src/arrow/dbi/README.md                        |    24 -
 cpp/src/arrow/dbi/hiveserver2/CMakeLists.txt       |   118 -
 cpp/src/arrow/dbi/hiveserver2/api.h                |    27 -
 cpp/src/arrow/dbi/hiveserver2/columnar_row_set.cc  |   100 -
 cpp/src/arrow/dbi/hiveserver2/columnar_row_set.h   |   155 -
 cpp/src/arrow/dbi/hiveserver2/hiveserver2_test.cc  |   458 -
 cpp/src/arrow/dbi/hiveserver2/operation.cc         |   150 -
 cpp/src/arrow/dbi/hiveserver2/operation.h          |   127 -
 cpp/src/arrow/dbi/hiveserver2/public_api_test.cc   |    26 -
 cpp/src/arrow/dbi/hiveserver2/sample_usage.cc      |   137 -
 cpp/src/arrow/dbi/hiveserver2/service.cc           |   110 -
 cpp/src/arrow/dbi/hiveserver2/service.h            |   140 -
 cpp/src/arrow/dbi/hiveserver2/session.cc           |   103 -
 cpp/src/arrow/dbi/hiveserver2/session.h            |    84 -
 cpp/src/arrow/dbi/hiveserver2/thrift/.gitignore    |     1 -
 .../arrow/dbi/hiveserver2/thrift/CMakeLists.txt    |   117 -
 .../arrow/dbi/hiveserver2/thrift/ExecStats.thrift  |   103 -
 .../dbi/hiveserver2/thrift/ImpalaService.thrift    |   300 -
 cpp/src/arrow/dbi/hiveserver2/thrift/Status.thrift |    23 -
 .../dbi/hiveserver2/thrift/TCLIService.thrift      |  1180 -
 cpp/src/arrow/dbi/hiveserver2/thrift/Types.thrift  |   218 -
 .../arrow/dbi/hiveserver2/thrift/beeswax.thrift    |   174 -
 cpp/src/arrow/dbi/hiveserver2/thrift/fb303.thrift  |   112 -
 .../dbi/hiveserver2/thrift/generate_error_codes.py |   293 -
 .../dbi/hiveserver2/thrift/hive_metastore.thrift   |  1214 -
 cpp/src/arrow/dbi/hiveserver2/thrift_internal.cc   |   301 -
 cpp/src/arrow/dbi/hiveserver2/thrift_internal.h    |    91 -
 cpp/src/arrow/dbi/hiveserver2/types.cc             |    45 -
 cpp/src/arrow/dbi/hiveserver2/types.h              |   131 -
 cpp/src/arrow/dbi/hiveserver2/util.cc              |   250 -
 cpp/src/arrow/dbi/hiveserver2/util.h               |    36 -
 cpp/src/arrow/device.cc                            |   209 -
 cpp/src/arrow/device.h                             |   226 -
 cpp/src/arrow/extension_type.cc                    |   169 -
 cpp/src/arrow/extension_type.h                     |   161 -
 cpp/src/arrow/extension_type_test.cc               |   334 -
 cpp/src/arrow/filesystem/CMakeLists.txt            |    75 -
 cpp/src/arrow/filesystem/api.h                     |    28 -
 cpp/src/arrow/filesystem/arrow-filesystem.pc.in    |    24 -
 cpp/src/arrow/filesystem/filesystem.cc             |   750 -
 cpp/src/arrow/filesystem/filesystem.h              |   524 -
 cpp/src/arrow/filesystem/filesystem_test.cc        |   810 -
 cpp/src/arrow/filesystem/hdfs.cc                   |   484 -
 cpp/src/arrow/filesystem/hdfs.h                    |   111 -
 cpp/src/arrow/filesystem/hdfs_test.cc              |   312 -
 cpp/src/arrow/filesystem/localfs.cc                |   448 -
 cpp/src/arrow/filesystem/localfs.h                 |   111 -
 cpp/src/arrow/filesystem/localfs_test.cc           |   396 -
 cpp/src/arrow/filesystem/mockfs.cc                 |   767 -
 cpp/src/arrow/filesystem/mockfs.h                  |   130 -
 cpp/src/arrow/filesystem/path_util.cc              |   271 -
 cpp/src/arrow/filesystem/path_util.h               |   130 -
 cpp/src/arrow/filesystem/s3_internal.h             |   214 -
 cpp/src/arrow/filesystem/s3_test_util.h            |   154 -
 cpp/src/arrow/filesystem/s3fs.cc                   |  2139 --
 cpp/src/arrow/filesystem/s3fs.h                    |   257 -
 cpp/src/arrow/filesystem/s3fs_benchmark.cc         |   430 -
 cpp/src/arrow/filesystem/s3fs_narrative_test.cc    |   245 -
 cpp/src/arrow/filesystem/s3fs_test.cc              |   971 -
 cpp/src/arrow/filesystem/test_util.cc              |  1052 -
 cpp/src/arrow/filesystem/test_util.h               |   208 -
 cpp/src/arrow/filesystem/type_fwd.h                |    49 -
 cpp/src/arrow/filesystem/util_internal.cc          |    73 -
 cpp/src/arrow/filesystem/util_internal.h           |    56 -
 cpp/src/arrow/flight/ArrowFlightConfig.cmake.in    |    36 -
 .../arrow/flight/ArrowFlightTestingConfig.cmake.in |    37 -
 cpp/src/arrow/flight/CMakeLists.txt                |   273 -
 cpp/src/arrow/flight/README.md                     |    36 -
 cpp/src/arrow/flight/api.h                         |    27 -
 cpp/src/arrow/flight/arrow-flight-testing.pc.in    |    25 -
 cpp/src/arrow/flight/arrow-flight.pc.in            |    25 -
 cpp/src/arrow/flight/client.cc                     |  1313 -
 cpp/src/arrow/flight/client.h                      |   320 -
 cpp/src/arrow/flight/client_auth.h                 |    62 -
 cpp/src/arrow/flight/client_cookie_middleware.cc   |    65 -
 cpp/src/arrow/flight/client_cookie_middleware.h    |    33 -
 cpp/src/arrow/flight/client_header_internal.cc     |   340 -
 cpp/src/arrow/flight/client_header_internal.h      |   151 -
 cpp/src/arrow/flight/client_middleware.h           |    73 -
 cpp/src/arrow/flight/customize_protobuf.h          |   108 -
 cpp/src/arrow/flight/flight_benchmark.cc           |   431 -
 cpp/src/arrow/flight/flight_test.cc                |  2666 --
 cpp/src/arrow/flight/internal.cc                   |   514 -
 cpp/src/arrow/flight/internal.h                    |   128 -
 cpp/src/arrow/flight/middleware.h                  |    73 -
 cpp/src/arrow/flight/middleware_internal.h         |    46 -
 cpp/src/arrow/flight/pch.h                         |    26 -
 cpp/src/arrow/flight/perf.proto                    |    44 -
 cpp/src/arrow/flight/perf_server.cc                |   255 -
 cpp/src/arrow/flight/platform.h                    |    32 -
 cpp/src/arrow/flight/protocol_internal.cc          |    26 -
 cpp/src/arrow/flight/protocol_internal.h           |    28 -
 cpp/src/arrow/flight/serialization_internal.cc     |   469 -
 cpp/src/arrow/flight/serialization_internal.h      |   150 -
 cpp/src/arrow/flight/server.cc                     |  1164 -
 cpp/src/arrow/flight/server.h                      |   282 -
 cpp/src/arrow/flight/server_auth.cc                |    37 -
 cpp/src/arrow/flight/server_auth.h                 |    78 -
 cpp/src/arrow/flight/server_middleware.h           |    83 -
 cpp/src/arrow/flight/test_integration.cc           |   270 -
 cpp/src/arrow/flight/test_integration.h            |    49 -
 cpp/src/arrow/flight/test_integration_client.cc    |   244 -
 cpp/src/arrow/flight/test_integration_server.cc    |   207 -
 cpp/src/arrow/flight/test_server.cc                |    62 -
 cpp/src/arrow/flight/test_util.cc                  |   779 -
 cpp/src/arrow/flight/test_util.h                   |   238 -
 .../arrow/flight/try_compile/check_tls_opts_127.cc |    36 -
 .../arrow/flight/try_compile/check_tls_opts_132.cc |    36 -
 .../arrow/flight/try_compile/check_tls_opts_134.cc |    44 -
 .../arrow/flight/try_compile/check_tls_opts_136.cc |    38 -
 cpp/src/arrow/flight/types.cc                      |   327 -
 cpp/src/arrow/flight/types.h                       |   521 -
 cpp/src/arrow/flight/visibility.h                  |    48 -
 cpp/src/arrow/gpu/.gitignore                       |    18 -
 cpp/src/arrow/gpu/ArrowCUDAConfig.cmake.in         |    36 -
 cpp/src/arrow/gpu/CMakeLists.txt                   |    87 -
 cpp/src/arrow/gpu/arrow-cuda.pc.in                 |    26 -
 cpp/src/arrow/gpu/cuda_api.h                       |    23 -
 cpp/src/arrow/gpu/cuda_arrow_ipc.cc                |    69 -
 cpp/src/arrow/gpu/cuda_arrow_ipc.h                 |    72 -
 cpp/src/arrow/gpu/cuda_benchmark.cc                |    94 -
 cpp/src/arrow/gpu/cuda_context.cc                  |   645 -
 cpp/src/arrow/gpu/cuda_context.h                   |   309 -
 cpp/src/arrow/gpu/cuda_internal.cc                 |    66 -
 cpp/src/arrow/gpu/cuda_internal.h                  |    60 -
 cpp/src/arrow/gpu/cuda_memory.cc                   |   487 -
 cpp/src/arrow/gpu/cuda_memory.h                    |   260 -
 cpp/src/arrow/gpu/cuda_test.cc                     |   626 -
 cpp/src/arrow/gpu/cuda_version.h.in                |    25 -
 cpp/src/arrow/io/CMakeLists.txt                    |    39 -
 cpp/src/arrow/io/api.h                             |    25 -
 cpp/src/arrow/io/buffered.cc                       |   480 -
 cpp/src/arrow/io/buffered.h                        |   164 -
 cpp/src/arrow/io/buffered_test.cc                  |   667 -
 cpp/src/arrow/io/caching.cc                        |   208 -
 cpp/src/arrow/io/caching.h                         |   113 -
 cpp/src/arrow/io/compressed.cc                     |   441 -
 cpp/src/arrow/io/compressed.h                      |   115 -
 cpp/src/arrow/io/compressed_test.cc                |   305 -
 cpp/src/arrow/io/concurrency.h                     |   263 -
 cpp/src/arrow/io/file.cc                           |   775 -
 cpp/src/arrow/io/file.h                            |   221 -
 cpp/src/arrow/io/file_benchmark.cc                 |   301 -
 cpp/src/arrow/io/file_test.cc                      |  1064 -
 cpp/src/arrow/io/hdfs.cc                           |   695 -
 cpp/src/arrow/io/hdfs.h                            |   280 -
 cpp/src/arrow/io/hdfs_internal.cc                  |   545 -
 cpp/src/arrow/io/hdfs_internal.h                   |   222 -
 cpp/src/arrow/io/hdfs_test.cc                      |   464 -
 cpp/src/arrow/io/interfaces.cc                     |   441 -
 cpp/src/arrow/io/interfaces.h                      |   329 -
 cpp/src/arrow/io/memory.cc                         |   388 -
 cpp/src/arrow/io/memory.h                          |   197 -
 cpp/src/arrow/io/memory_benchmark.cc               |   359 -
 cpp/src/arrow/io/memory_test.cc                    |   756 -
 cpp/src/arrow/io/mman.h                            |   169 -
 cpp/src/arrow/io/slow.cc                           |   148 -
 cpp/src/arrow/io/slow.h                            |   118 -
 cpp/src/arrow/io/test_common.cc                    |   121 -
 cpp/src/arrow/io/test_common.h                     |    58 -
 cpp/src/arrow/io/transform.cc                      |   149 -
 cpp/src/arrow/io/transform.h                       |    56 -
 cpp/src/arrow/io/type_fwd.h                        |    60 -
 cpp/src/arrow/io/util_internal.h                   |    66 -
 cpp/src/arrow/ipc/CMakeLists.txt                   |    87 -
 cpp/src/arrow/ipc/api.h                            |    25 -
 cpp/src/arrow/ipc/dictionary.cc                    |   412 -
 cpp/src/arrow/ipc/dictionary.h                     |   177 -
 cpp/src/arrow/ipc/feather.cc                       |   817 -
 cpp/src/arrow/ipc/feather.fbs                      |   156 -
 cpp/src/arrow/ipc/feather.h                        |   140 -
 cpp/src/arrow/ipc/feather_test.cc                  |   373 -
 cpp/src/arrow/ipc/file_fuzz.cc                     |    28 -
 cpp/src/arrow/ipc/file_to_stream.cc                |    65 -
 cpp/src/arrow/ipc/generate_fuzz_corpus.cc          |   161 -
 cpp/src/arrow/ipc/generate_tensor_fuzz_corpus.cc   |   134 -
 cpp/src/arrow/ipc/json_simple.cc                   |   917 -
 cpp/src/arrow/ipc/json_simple.h                    |    57 -
 cpp/src/arrow/ipc/json_simple_test.cc              |  1333 -
 cpp/src/arrow/ipc/message.cc                       |   876 -
 cpp/src/arrow/ipc/message.h                        |   531 -
 cpp/src/arrow/ipc/metadata_internal.cc             |  1486 -
 cpp/src/arrow/ipc/metadata_internal.h              |   227 -
 cpp/src/arrow/ipc/options.cc                       |    41 -
 cpp/src/arrow/ipc/options.h                        |   161 -
 cpp/src/arrow/ipc/read_write_benchmark.cc          |   197 -
 cpp/src/arrow/ipc/read_write_test.cc               |  2356 --
 cpp/src/arrow/ipc/reader.cc                        |  1822 -
 cpp/src/arrow/ipc/reader.h                         |   495 -
 cpp/src/arrow/ipc/stream_fuzz.cc                   |    28 -
 cpp/src/arrow/ipc/stream_to_file.cc                |    61 -
 cpp/src/arrow/ipc/tensor_stream_fuzz.cc            |    29 -
 cpp/src/arrow/ipc/tensor_test.cc                   |   506 -
 cpp/src/arrow/ipc/test_common.cc                   |  1104 -
 cpp/src/arrow/ipc/test_common.h                    |   172 -
 cpp/src/arrow/ipc/type_fwd.h                       |    65 -
 cpp/src/arrow/ipc/util.h                           |    41 -
 cpp/src/arrow/ipc/writer.cc                        |  1429 -
 cpp/src/arrow/ipc/writer.h                         |   459 -
 cpp/src/arrow/json/CMakeLists.txt                  |    32 -
 cpp/src/arrow/json/api.h                           |    21 -
 cpp/src/arrow/json/arrow-json.pc.in                |    24 -
 cpp/src/arrow/json/chunked_builder.cc              |   469 -
 cpp/src/arrow/json/chunked_builder.h               |    68 -
 cpp/src/arrow/json/chunked_builder_test.cc         |   454 -
 cpp/src/arrow/json/chunker.cc                      |   181 -
 cpp/src/arrow/json/chunker.h                       |    35 -
 cpp/src/arrow/json/chunker_test.cc                 |   276 -
 cpp/src/arrow/json/converter.cc                    |   323 -
 cpp/src/arrow/json/converter.h                     |    94 -
 cpp/src/arrow/json/converter_test.cc               |   100 -
 cpp/src/arrow/json/object_parser.cc                |    83 -
 cpp/src/arrow/json/object_parser.h                 |    49 -
 cpp/src/arrow/json/object_writer.cc                |    82 -
 cpp/src/arrow/json/object_writer.h                 |    48 -
 cpp/src/arrow/json/options.cc                      |    28 -
 cpp/src/arrow/json/options.h                       |    74 -
 cpp/src/arrow/json/parser.cc                       |  1099 -
 cpp/src/arrow/json/parser.h                        |   101 -
 cpp/src/arrow/json/parser_benchmark.cc             |   164 -
 cpp/src/arrow/json/parser_test.cc                  |   254 -
 cpp/src/arrow/json/rapidjson_defs.h                |    43 -
 cpp/src/arrow/json/reader.cc                       |   227 -
 cpp/src/arrow/json/reader.h                        |    72 -
 cpp/src/arrow/json/reader_test.cc                  |   278 -
 cpp/src/arrow/json/test_common.h                   |   251 -
 cpp/src/arrow/json/type_fwd.h                      |    26 -
 cpp/src/arrow/memory_pool.cc                       |   657 -
 cpp/src/arrow/memory_pool.h                        |   178 -
 cpp/src/arrow/memory_pool_benchmark.cc             |   129 -
 cpp/src/arrow/memory_pool_test.cc                  |   174 -
 cpp/src/arrow/memory_pool_test.h                   |    92 -
 cpp/src/arrow/pch.h                                |    30 -
 cpp/src/arrow/pretty_print.cc                      |   673 -
 cpp/src/arrow/pretty_print.h                       |   123 -
 cpp/src/arrow/pretty_print_test.cc                 |   744 -
 cpp/src/arrow/public_api_test.cc                   |    89 -
 cpp/src/arrow/python/ArrowPythonConfig.cmake.in    |    36 -
 .../arrow/python/ArrowPythonFlightConfig.cmake.in  |    37 -
 cpp/src/arrow/python/CMakeLists.txt                |   186 -
 cpp/src/arrow/python/api.h                         |    30 -
 cpp/src/arrow/python/arrow-python-flight.pc.in     |    25 -
 cpp/src/arrow/python/arrow-python.pc.in            |    26 -
 cpp/src/arrow/python/arrow_to_pandas.cc            |  2294 --
 cpp/src/arrow/python/arrow_to_pandas.h             |   124 -
 cpp/src/arrow/python/benchmark.cc                  |    38 -
 cpp/src/arrow/python/benchmark.h                   |    36 -
 cpp/src/arrow/python/common.cc                     |   206 -
 cpp/src/arrow/python/common.h                      |   300 -
 cpp/src/arrow/python/datetime.cc                   |   455 -
 cpp/src/arrow/python/datetime.h                    |   183 -
 cpp/src/arrow/python/decimal.cc                    |   247 -
 cpp/src/arrow/python/decimal.h                     |   128 -
 cpp/src/arrow/python/deserialize.cc                |   495 -
 cpp/src/arrow/python/deserialize.h                 |   106 -
 cpp/src/arrow/python/extension_type.cc             |   217 -
 cpp/src/arrow/python/extension_type.h              |    85 -
 cpp/src/arrow/python/filesystem.cc                 |   206 -
 cpp/src/arrow/python/filesystem.h                  |   122 -
 cpp/src/arrow/python/flight.cc                     |   408 -
 cpp/src/arrow/python/flight.h                      |   357 -
 cpp/src/arrow/python/helpers.cc                    |   436 -
 cpp/src/arrow/python/helpers.h                     |   156 -
 cpp/src/arrow/python/inference.cc                  |   659 -
 cpp/src/arrow/python/inference.h                   |    64 -
 cpp/src/arrow/python/init.cc                       |    24 -
 cpp/src/arrow/python/init.h                        |    26 -
 cpp/src/arrow/python/io.cc                         |   374 -
 cpp/src/arrow/python/io.h                          |   116 -
 cpp/src/arrow/python/ipc.cc                        |    67 -
 cpp/src/arrow/python/ipc.h                         |    52 -
 cpp/src/arrow/python/iterators.h                   |   154 -
 cpp/src/arrow/python/numpy_convert.cc              |   561 -
 cpp/src/arrow/python/numpy_convert.h               |   120 -
 cpp/src/arrow/python/numpy_internal.h              |   182 -
 cpp/src/arrow/python/numpy_interop.h               |    96 -
 cpp/src/arrow/python/numpy_to_arrow.cc             |   854 -
 cpp/src/arrow/python/numpy_to_arrow.h              |    72 -
 cpp/src/arrow/python/pch.h                         |    24 -
 cpp/src/arrow/python/platform.h                    |    36 -
 cpp/src/arrow/python/pyarrow.cc                    |    93 -
 cpp/src/arrow/python/pyarrow.h                     |    87 -
 cpp/src/arrow/python/pyarrow_api.h                 |   239 -
 cpp/src/arrow/python/pyarrow_lib.h                 |    82 -
 cpp/src/arrow/python/python_test.cc                |   536 -
 cpp/src/arrow/python/python_to_arrow.cc            |  1062 -
 cpp/src/arrow/python/python_to_arrow.h             |    80 -
 cpp/src/arrow/python/serialize.cc                  |   798 -
 cpp/src/arrow/python/serialize.h                   |   145 -
 cpp/src/arrow/python/type_traits.h                 |   350 -
 cpp/src/arrow/python/util/CMakeLists.txt           |    32 -
 cpp/src/arrow/python/util/test_main.cc             |    41 -
 cpp/src/arrow/python/visibility.h                  |    39 -
 cpp/src/arrow/record_batch.cc                      |   348 -
 cpp/src/arrow/record_batch.h                       |   232 -
 cpp/src/arrow/record_batch_test.cc                 |   292 -
 cpp/src/arrow/result.cc                            |    36 -
 cpp/src/arrow/result.h                             |   514 -
 cpp/src/arrow/result_internal.h                    |    22 -
 cpp/src/arrow/result_test.cc                       |   728 -
 cpp/src/arrow/scalar.cc                            |   645 -
 cpp/src/arrow/scalar.h                             |   537 -
 cpp/src/arrow/scalar_test.cc                       |  1046 -
 cpp/src/arrow/sparse_tensor.cc                     |   478 -
 cpp/src/arrow/sparse_tensor.h                      |   624 -
 cpp/src/arrow/sparse_tensor_test.cc                |  1678 -
 cpp/src/arrow/status.cc                            |   143 -
 cpp/src/arrow/status.h                             |   448 -
 cpp/src/arrow/status_test.cc                       |   130 -
 cpp/src/arrow/stl.h                                |   466 -
 cpp/src/arrow/stl_allocator.h                      |   153 -
 cpp/src/arrow/stl_iterator.h                       |   142 -
 cpp/src/arrow/stl_iterator_test.cc                 |   252 -
 cpp/src/arrow/stl_test.cc                          |   558 -
 cpp/src/arrow/symbols.map                          |    38 -
 cpp/src/arrow/table.cc                             |   645 -
 cpp/src/arrow/table.h                              |   295 -
 cpp/src/arrow/table_builder.cc                     |   113 -
 cpp/src/arrow/table_builder.h                      |   110 -
 cpp/src/arrow/table_builder_test.cc                |   182 -
 cpp/src/arrow/table_test.cc                        |   753 -
 cpp/src/arrow/tensor.cc                            |   342 -
 cpp/src/arrow/tensor.h                             |   250 -
 cpp/src/arrow/tensor/CMakeLists.txt                |    25 -
 cpp/src/arrow/tensor/converter.h                   |    67 -
 cpp/src/arrow/tensor/converter_internal.h          |    88 -
 cpp/src/arrow/tensor/coo_converter.cc              |   333 -
 cpp/src/arrow/tensor/csf_converter.cc              |   289 -
 cpp/src/arrow/tensor/csx_converter.cc              |   241 -
 .../arrow/tensor/tensor_conversion_benchmark.cc    |   230 -
 cpp/src/arrow/tensor_test.cc                       |   749 -
 cpp/src/arrow/testing/CMakeLists.txt               |    37 -
 cpp/src/arrow/testing/extension_type.h             |   119 -
 cpp/src/arrow/testing/future_util.h                |   104 -
 cpp/src/arrow/testing/generator.cc                 |   182 -
 cpp/src/arrow/testing/generator.h                  |   261 -
 cpp/src/arrow/testing/gtest_common.h               |   128 -
 cpp/src/arrow/testing/gtest_compat.h               |    29 -
 cpp/src/arrow/testing/gtest_util.cc                |   854 -
 cpp/src/arrow/testing/gtest_util.h                 |   622 -
 cpp/src/arrow/testing/json_integration.cc          |   219 -
 cpp/src/arrow/testing/json_integration.h           |   129 -
 cpp/src/arrow/testing/json_integration_test.cc     |  1189 -
 cpp/src/arrow/testing/json_internal.cc             |  1755 -
 cpp/src/arrow/testing/json_internal.h              |   126 -
 cpp/src/arrow/testing/macros.h                     |    29 -
 cpp/src/arrow/testing/pch.h                        |    26 -
 cpp/src/arrow/testing/random.cc                    |   918 -
 cpp/src/arrow/testing/random.h                     |   458 -
 cpp/src/arrow/testing/random_test.cc               |   356 -
 cpp/src/arrow/testing/util.cc                      |   185 -
 cpp/src/arrow/testing/util.h                       |   190 -
 cpp/src/arrow/testing/visibility.h                 |    48 -
 cpp/src/arrow/type.cc                              |  2267 --
 cpp/src/arrow/type.h                               |  1926 --
 cpp/src/arrow/type_benchmark.cc                    |   439 -
 cpp/src/arrow/type_fwd.h                           |   677 -
 cpp/src/arrow/type_test.cc                         |  1775 -
 cpp/src/arrow/type_traits.h                        |  1012 -
 cpp/src/arrow/util/CMakeLists.txt                  |    94 -
 cpp/src/arrow/util/algorithm.h                     |    33 -
 cpp/src/arrow/util/align_util.h                    |    68 -
 cpp/src/arrow/util/align_util_test.cc              |   150 -
 cpp/src/arrow/util/async_generator.h               |  1561 -
 cpp/src/arrow/util/async_generator_test.cc         |  1456 -
 cpp/src/arrow/util/atomic_shared_ptr.h             |   111 -
 cpp/src/arrow/util/base64.h                        |    34 -
 cpp/src/arrow/util/basic_decimal.cc                |  1344 -
 cpp/src/arrow/util/basic_decimal.h                 |   341 -
 cpp/src/arrow/util/benchmark_main.cc               |    24 -
 cpp/src/arrow/util/benchmark_util.h                |   138 -
 cpp/src/arrow/util/bit_block_counter.cc            |    80 -
 cpp/src/arrow/util/bit_block_counter.h             |   529 -
 cpp/src/arrow/util/bit_block_counter_benchmark.cc  |   266 -
 cpp/src/arrow/util/bit_block_counter_test.cc       |   417 -
 cpp/src/arrow/util/bit_run_reader.cc               |    54 -
 cpp/src/arrow/util/bit_run_reader.h                |   515 -
 cpp/src/arrow/util/bit_stream_utils.h              |   433 -
 cpp/src/arrow/util/bit_util.cc                     |    71 -
 cpp/src/arrow/util/bit_util.h                      |   320 -
 cpp/src/arrow/util/bit_util_benchmark.cc           |   560 -
 cpp/src/arrow/util/bit_util_test.cc                |  2160 --
 cpp/src/arrow/util/bitmap.cc                       |    75 -
 cpp/src/arrow/util/bitmap.h                        |   309 -
 cpp/src/arrow/util/bitmap_builders.cc              |    72 -
 cpp/src/arrow/util/bitmap_builders.h               |    43 -
 cpp/src/arrow/util/bitmap_generate.h               |   111 -
 cpp/src/arrow/util/bitmap_ops.cc                   |   588 -
 cpp/src/arrow/util/bitmap_ops.h                    |   187 -
 cpp/src/arrow/util/bitmap_reader.h                 |   159 -
 cpp/src/arrow/util/bitmap_visit.h                  |    88 -
 cpp/src/arrow/util/bitmap_writer.h                 |   184 -
 cpp/src/arrow/util/bitset_stack.h                  |    89 -
 cpp/src/arrow/util/bpacking.cc                     |   178 -
 cpp/src/arrow/util/bpacking.h                      |    32 -
 cpp/src/arrow/util/bpacking_avx2.cc                |    31 -
 cpp/src/arrow/util/bpacking_avx2.h                 |    28 -
 cpp/src/arrow/util/bpacking_avx2_generated.h       |  1819 -
 cpp/src/arrow/util/bpacking_avx512.cc              |    31 -
 cpp/src/arrow/util/bpacking_avx512.h               |    28 -
 cpp/src/arrow/util/bpacking_avx512_generated.h     |  1509 -
 cpp/src/arrow/util/bpacking_default.h              |  4251 ---
 cpp/src/arrow/util/bpacking_neon.cc                |    31 -
 cpp/src/arrow/util/bpacking_neon.h                 |    28 -
 cpp/src/arrow/util/bpacking_simd128_generated.h    |  2138 --
 cpp/src/arrow/util/bpacking_simd256_generated.h    |  1270 -
 cpp/src/arrow/util/bpacking_simd512_generated.h    |   836 -
 cpp/src/arrow/util/bpacking_simd_codegen.py        |   209 -
 cpp/src/arrow/util/bpacking_simd_internal.h        |   138 -
 cpp/src/arrow/util/byte_stream_split.h             |   626 -
 cpp/src/arrow/util/cache_benchmark.cc              |   146 -
 cpp/src/arrow/util/cache_internal.h                |   210 -
 cpp/src/arrow/util/cache_test.cc                   |   290 -
 cpp/src/arrow/util/cancel.cc                       |   226 -
 cpp/src/arrow/util/cancel.h                        |   102 -
 cpp/src/arrow/util/cancel_test.cc                  |   308 -
 cpp/src/arrow/util/checked_cast.h                  |    61 -
 cpp/src/arrow/util/checked_cast_test.cc            |    74 -
 cpp/src/arrow/util/compare.h                       |    62 -
 cpp/src/arrow/util/compiler_util.h                 |    22 -
 cpp/src/arrow/util/compression.cc                  |   231 -
 cpp/src/arrow/util/compression.h                   |   181 -
 cpp/src/arrow/util/compression_benchmark.cc        |   201 -
 cpp/src/arrow/util/compression_brotli.cc           |   240 -
 cpp/src/arrow/util/compression_bz2.cc              |   281 -
 cpp/src/arrow/util/compression_internal.h          |    80 -
 cpp/src/arrow/util/compression_lz4.cc              |   489 -
 cpp/src/arrow/util/compression_snappy.cc           |    99 -
 cpp/src/arrow/util/compression_test.cc             |   604 -
 cpp/src/arrow/util/compression_zlib.cc             |   501 -
 cpp/src/arrow/util/compression_zstd.cc             |   246 -
 cpp/src/arrow/util/concurrent_map.h                |    68 -
 cpp/src/arrow/util/config.h.cmake                  |    48 -
 cpp/src/arrow/util/converter.h                     |   368 -
 cpp/src/arrow/util/cpu_info.cc                     |   563 -
 cpp/src/arrow/util/cpu_info.h                      |   143 -
 cpp/src/arrow/util/decimal.cc                      |   932 -
 cpp/src/arrow/util/decimal.h                       |   291 -
 cpp/src/arrow/util/decimal_benchmark.cc            |   219 -
 cpp/src/arrow/util/decimal_test.cc                 |  1671 -
 cpp/src/arrow/util/delimiting.cc                   |   141 -
 cpp/src/arrow/util/delimiting.h                    |   147 -
 cpp/src/arrow/util/dispatch.h                      |   115 -
 cpp/src/arrow/util/double_conversion.h             |    32 -
 cpp/src/arrow/util/endian.h                        |   181 -
 cpp/src/arrow/util/formatting.cc                   |    91 -
 cpp/src/arrow/util/formatting.h                    |   426 -
 cpp/src/arrow/util/formatting_util_test.cc         |   430 -
 cpp/src/arrow/util/functional.h                    |   130 -
 cpp/src/arrow/util/future.cc                       |   375 -
 cpp/src/arrow/util/future.h                        |   762 -
 cpp/src/arrow/util/future_iterator.h               |    75 -
 cpp/src/arrow/util/future_test.cc                  |  1602 -
 cpp/src/arrow/util/hash_util.h                     |    66 -
 cpp/src/arrow/util/hashing.h                       |   878 -
 cpp/src/arrow/util/hashing_benchmark.cc            |   123 -
 cpp/src/arrow/util/hashing_test.cc                 |   490 -
 cpp/src/arrow/util/int128_internal.h               |    45 -
 cpp/src/arrow/util/int_util.cc                     |   952 -
 cpp/src/arrow/util/int_util.h                      |   117 -
 cpp/src/arrow/util/int_util_benchmark.cc           |   143 -
 cpp/src/arrow/util/int_util_internal.h             |   125 -
 cpp/src/arrow/util/int_util_test.cc                |   597 -
 cpp/src/arrow/util/io_util.cc                      |  1724 -
 cpp/src/arrow/util/io_util.h                       |   403 -
 cpp/src/arrow/util/io_util_test.cc                 |   700 -
 cpp/src/arrow/util/io_util_test.manifest           |    39 -
 cpp/src/arrow/util/io_util_test.rc                 |    44 -
 cpp/src/arrow/util/iterator.h                      |   563 -
 cpp/src/arrow/util/iterator_test.cc                |   464 -
 cpp/src/arrow/util/key_value_metadata.cc           |   269 -
 cpp/src/arrow/util/key_value_metadata.h            |    96 -
 cpp/src/arrow/util/key_value_metadata_test.cc      |   211 -
 cpp/src/arrow/util/logging.cc                      |   256 -
 cpp/src/arrow/util/logging.h                       |   259 -
 cpp/src/arrow/util/logging_test.cc                 |   103 -
 cpp/src/arrow/util/machine_benchmark.cc            |    74 -
 cpp/src/arrow/util/macros.h                        |   185 -
 cpp/src/arrow/util/make_unique.h                   |    42 -
 cpp/src/arrow/util/map.h                           |    63 -
 cpp/src/arrow/util/memory.cc                       |    74 -
 cpp/src/arrow/util/memory.h                        |    43 -
 cpp/src/arrow/util/mutex.cc                        |    54 -
 cpp/src/arrow/util/mutex.h                         |    64 -
 cpp/src/arrow/util/optional.h                      |    35 -
 cpp/src/arrow/util/parallel.h                      |    65 -
 cpp/src/arrow/util/print.h                         |    51 -
 cpp/src/arrow/util/queue.h                         |    29 -
 cpp/src/arrow/util/queue_benchmark.cc              |    85 -
 cpp/src/arrow/util/queue_test.cc                   |    55 -
 cpp/src/arrow/util/range.h                         |   155 -
 cpp/src/arrow/util/range_benchmark.cc              |   128 -
 cpp/src/arrow/util/range_test.cc                   |    69 -
 cpp/src/arrow/util/rle_encoding.h                  |   826 -
 cpp/src/arrow/util/rle_encoding_test.cc            |   573 -
 cpp/src/arrow/util/simd.h                          |    50 -
 cpp/src/arrow/util/sort.h                          |    78 -
 cpp/src/arrow/util/spaced.h                        |    98 -
 cpp/src/arrow/util/stl_util_test.cc                |   172 -
 cpp/src/arrow/util/stopwatch.h                     |    48 -
 cpp/src/arrow/util/string.cc                       |   191 -
 cpp/src/arrow/util/string.h                        |    79 -
 cpp/src/arrow/util/string_builder.cc               |    40 -
 cpp/src/arrow/util/string_builder.h                |    84 -
 cpp/src/arrow/util/string_test.cc                  |   144 -
 cpp/src/arrow/util/string_view.h                   |    38 -
 cpp/src/arrow/util/task_group.cc                   |   224 -
 cpp/src/arrow/util/task_group.h                    |   106 -
 cpp/src/arrow/util/task_group_test.cc              |   444 -
 cpp/src/arrow/util/tdigest.cc                      |   416 -
 cpp/src/arrow/util/tdigest.h                       |   103 -
 cpp/src/arrow/util/tdigest_benchmark.cc            |    48 -
 cpp/src/arrow/util/tdigest_test.cc                 |   290 -
 cpp/src/arrow/util/test_common.cc                  |    68 -
 cpp/src/arrow/util/test_common.h                   |    88 -
 cpp/src/arrow/util/thread_pool.cc                  |   440 -
 cpp/src/arrow/util/thread_pool.h                   |   343 -
 cpp/src/arrow/util/thread_pool_benchmark.cc        |   231 -
 cpp/src/arrow/util/thread_pool_test.cc             |   666 -
 cpp/src/arrow/util/time.cc                         |    68 -
 cpp/src/arrow/util/time.h                          |    82 -
 cpp/src/arrow/util/time_test.cc                    |    63 -
 cpp/src/arrow/util/trie.cc                         |   211 -
 cpp/src/arrow/util/trie.h                          |   245 -
 cpp/src/arrow/util/trie_benchmark.cc               |   222 -
 cpp/src/arrow/util/trie_test.cc                    |   305 -
 cpp/src/arrow/util/type_fwd.h                      |    62 -
 cpp/src/arrow/util/type_traits.h                   |    86 -
 cpp/src/arrow/util/ubsan.h                         |    88 -
 cpp/src/arrow/util/uri.cc                          |   292 -
 cpp/src/arrow/util/uri.h                           |   104 -
 cpp/src/arrow/util/uri_test.cc                     |   312 -
 cpp/src/arrow/util/utf8.cc                         |   158 -
 cpp/src/arrow/util/utf8.h                          |   522 -
 cpp/src/arrow/util/utf8_util_benchmark.cc          |   142 -
 cpp/src/arrow/util/utf8_util_test.cc               |   493 -
 cpp/src/arrow/util/value_parsing.cc                |    87 -
 cpp/src/arrow/util/value_parsing.h                 |   780 -
 cpp/src/arrow/util/value_parsing_benchmark.cc      |   255 -
 cpp/src/arrow/util/value_parsing_test.cc           |   460 -
 cpp/src/arrow/util/variant.h                       |   440 -
 cpp/src/arrow/util/variant_benchmark.cc            |   248 -
 cpp/src/arrow/util/variant_test.cc                 |   330 -
 cpp/src/arrow/util/vector.h                        |   137 -
 cpp/src/arrow/util/visibility.h                    |    45 -
 cpp/src/arrow/util/windows_compatibility.h         |    42 -
 cpp/src/arrow/util/windows_fixup.h                 |    45 -
 cpp/src/arrow/vendored/CMakeLists.txt              |    21 -
 cpp/src/arrow/vendored/ProducerConsumerQueue.h     |   217 -
 cpp/src/arrow/vendored/base64.cpp                  |   128 -
 cpp/src/arrow/vendored/datetime.h                  |    26 -
 cpp/src/arrow/vendored/datetime/CMakeLists.txt     |    18 -
 cpp/src/arrow/vendored/datetime/README.md          |    21 -
 cpp/src/arrow/vendored/datetime/date.h             |  7949 -----
 cpp/src/arrow/vendored/datetime/ios.h              |    53 -
 cpp/src/arrow/vendored/datetime/ios.mm             |   340 -
 cpp/src/arrow/vendored/datetime/tz.cpp             |  3877 ---
 cpp/src/arrow/vendored/datetime/tz.h               |  2804 --
 cpp/src/arrow/vendored/datetime/tz_private.h       |   319 -
 cpp/src/arrow/vendored/datetime/visibility.h       |    26 -
 .../arrow/vendored/double-conversion/.gitignore    |     1 -
 .../vendored/double-conversion/CMakeLists.txt      |    18 -
 cpp/src/arrow/vendored/double-conversion/README.md |    20 -
 .../vendored/double-conversion/bignum-dtoa.cc      |   641 -
 .../arrow/vendored/double-conversion/bignum-dtoa.h |    84 -
 cpp/src/arrow/vendored/double-conversion/bignum.cc |   767 -
 cpp/src/arrow/vendored/double-conversion/bignum.h  |   144 -
 .../vendored/double-conversion/cached-powers.cc    |   175 -
 .../vendored/double-conversion/cached-powers.h     |    64 -
 cpp/src/arrow/vendored/double-conversion/diy-fp.cc |    57 -
 cpp/src/arrow/vendored/double-conversion/diy-fp.h  |   118 -
 .../double-conversion/double-conversion.cc         |  1171 -
 .../vendored/double-conversion/double-conversion.h |   587 -
 .../arrow/vendored/double-conversion/fast-dtoa.cc  |   665 -
 .../arrow/vendored/double-conversion/fast-dtoa.h   |    88 -
 .../arrow/vendored/double-conversion/fixed-dtoa.cc |   405 -
 .../arrow/vendored/double-conversion/fixed-dtoa.h  |    56 -
 cpp/src/arrow/vendored/double-conversion/ieee.h    |   402 -
 cpp/src/arrow/vendored/double-conversion/strtod.cc |   580 -
 cpp/src/arrow/vendored/double-conversion/strtod.h  |    45 -
 cpp/src/arrow/vendored/double-conversion/utils.h   |   367 -
 cpp/src/arrow/vendored/fast_float/README.md        |     7 -
 cpp/src/arrow/vendored/fast_float/ascii_number.h   |   301 -
 .../arrow/vendored/fast_float/decimal_to_binary.h  |   176 -
 cpp/src/arrow/vendored/fast_float/fast_float.h     |    48 -
 cpp/src/arrow/vendored/fast_float/fast_table.h     |   691 -
 cpp/src/arrow/vendored/fast_float/float_common.h   |   345 -
 cpp/src/arrow/vendored/fast_float/parse_number.h   |   133 -
 .../fast_float/simple_decimal_conversion.h         |   362 -
 cpp/src/arrow/vendored/musl/README.md              |    25 -
 cpp/src/arrow/vendored/musl/strptime.c             |   237 -
 cpp/src/arrow/vendored/optional.hpp                |  1553 -
 cpp/src/arrow/vendored/portable-snippets/README.md |    10 -
 .../arrow/vendored/portable-snippets/safe-math.h   |  1072 -
 cpp/src/arrow/vendored/string_view.hpp             |  1531 -
 cpp/src/arrow/vendored/strptime.h                  |    35 -
 cpp/src/arrow/vendored/uriparser/README.md         |    25 -
 cpp/src/arrow/vendored/uriparser/Uri.h             |  1090 -
 cpp/src/arrow/vendored/uriparser/UriBase.h         |   377 -
 cpp/src/arrow/vendored/uriparser/UriCommon.c       |   572 -
 cpp/src/arrow/vendored/uriparser/UriCommon.h       |   109 -
 cpp/src/arrow/vendored/uriparser/UriCompare.c      |   168 -
 cpp/src/arrow/vendored/uriparser/UriDefsAnsi.h     |    82 -
 cpp/src/arrow/vendored/uriparser/UriDefsConfig.h   |   102 -
 cpp/src/arrow/vendored/uriparser/UriDefsUnicode.h  |    82 -
 cpp/src/arrow/vendored/uriparser/UriEscape.c       |   453 -
 cpp/src/arrow/vendored/uriparser/UriFile.c         |   242 -
 cpp/src/arrow/vendored/uriparser/UriIp4.c          |   329 -
 cpp/src/arrow/vendored/uriparser/UriIp4.h          |   110 -
 cpp/src/arrow/vendored/uriparser/UriIp4Base.c      |    96 -
 cpp/src/arrow/vendored/uriparser/UriIp4Base.h      |    59 -
 cpp/src/arrow/vendored/uriparser/UriMemory.c       |   468 -
 cpp/src/arrow/vendored/uriparser/UriMemory.h       |    78 -
 cpp/src/arrow/vendored/uriparser/UriNormalize.c    |   771 -
 .../arrow/vendored/uriparser/UriNormalizeBase.c    |   119 -
 .../arrow/vendored/uriparser/UriNormalizeBase.h    |    53 -
 cpp/src/arrow/vendored/uriparser/UriParse.c        |  2410 --
 cpp/src/arrow/vendored/uriparser/UriParseBase.c    |    90 -
 cpp/src/arrow/vendored/uriparser/UriParseBase.h    |    55 -
 cpp/src/arrow/vendored/uriparser/UriQuery.c        |   501 -
 cpp/src/arrow/vendored/uriparser/UriRecompose.c    |   577 -
 cpp/src/arrow/vendored/uriparser/UriResolve.c      |   329 -
 cpp/src/arrow/vendored/uriparser/UriShorten.c      |   324 -
 cpp/src/arrow/vendored/uriparser/config.h          |    47 -
 cpp/src/arrow/vendored/utfcpp/README.md            |    28 -
 cpp/src/arrow/vendored/utfcpp/checked.h            |   333 -
 cpp/src/arrow/vendored/utfcpp/core.h               |   338 -
 cpp/src/arrow/vendored/utfcpp/cpp11.h              |   103 -
 cpp/src/arrow/vendored/xxhash.h                    |    18 -
 cpp/src/arrow/vendored/xxhash/README.md            |    22 -
 cpp/src/arrow/vendored/xxhash/xxhash.c             |    43 -
 cpp/src/arrow/vendored/xxhash/xxhash.h             |  4769 ---
 cpp/src/arrow/visitor.cc                           |   169 -
 cpp/src/arrow/visitor.h                            |   152 -
 cpp/src/arrow/visitor_inline.h                     |   449 -
 cpp/src/gandiva/CMakeLists.txt                     |   249 -
 cpp/src/gandiva/GandivaConfig.cmake.in             |    36 -
 cpp/src/gandiva/annotator.cc                       |   118 -
 cpp/src/gandiva/annotator.h                        |    81 -
 cpp/src/gandiva/annotator_test.cc                  |   102 -
 cpp/src/gandiva/arrow.h                            |    57 -
 cpp/src/gandiva/basic_decimal_scalar.h             |    65 -
 cpp/src/gandiva/bitmap_accumulator.cc              |    75 -
 cpp/src/gandiva/bitmap_accumulator.h               |    79 -
 cpp/src/gandiva/bitmap_accumulator_test.cc         |   112 -
 cpp/src/gandiva/cache.cc                           |    45 -
 cpp/src/gandiva/cache.h                            |    59 -
 cpp/src/gandiva/cast_time.cc                       |    85 -
 cpp/src/gandiva/compiled_expr.h                    |    71 -
 cpp/src/gandiva/condition.h                        |    37 -
 cpp/src/gandiva/configuration.cc                   |    43 -
 cpp/src/gandiva/configuration.h                    |    84 -
 cpp/src/gandiva/context_helper.cc                  |    76 -
 cpp/src/gandiva/date_utils.cc                      |   232 -
 cpp/src/gandiva/date_utils.h                       |    52 -
 cpp/src/gandiva/decimal_ir.cc                      |   559 -
 cpp/src/gandiva/decimal_ir.h                       |   188 -
 cpp/src/gandiva/decimal_scalar.h                   |    76 -
 cpp/src/gandiva/decimal_type_util.cc               |    75 -
 cpp/src/gandiva/decimal_type_util.h                |    83 -
 cpp/src/gandiva/decimal_type_util_test.cc          |    58 -
 cpp/src/gandiva/decimal_xlarge.cc                  |   284 -
 cpp/src/gandiva/decimal_xlarge.h                   |    41 -
 cpp/src/gandiva/dex.h                              |   378 -
 cpp/src/gandiva/dex_visitor.h                      |    92 -
 cpp/src/gandiva/engine.cc                          |   338 -
 cpp/src/gandiva/engine.h                           |   104 -
 cpp/src/gandiva/engine_llvm_test.cc                |   131 -
 cpp/src/gandiva/eval_batch.h                       |   107 -
 cpp/src/gandiva/execution_context.h                |    54 -
 cpp/src/gandiva/exported_funcs.h                   |    59 -
 cpp/src/gandiva/exported_funcs_registry.cc         |    30 -
 cpp/src/gandiva/exported_funcs_registry.h          |    54 -
 cpp/src/gandiva/expr_decomposer.cc                 |   308 -
 cpp/src/gandiva/expr_decomposer.h                  |   125 -
 cpp/src/gandiva/expr_decomposer_test.cc            |   409 -
 cpp/src/gandiva/expr_validator.cc                  |   184 -
 cpp/src/gandiva/expr_validator.h                   |    78 -
 cpp/src/gandiva/expression.cc                      |    25 -
 cpp/src/gandiva/expression.h                       |    46 -
 cpp/src/gandiva/expression_registry.cc             |   187 -
 cpp/src/gandiva/expression_registry.h              |    71 -
 cpp/src/gandiva/expression_registry_test.cc        |    68 -
 cpp/src/gandiva/field_descriptor.h                 |    69 -
 cpp/src/gandiva/filter.cc                          |   163 -
 cpp/src/gandiva/filter.h                           |   112 -
 cpp/src/gandiva/formatting_utils.h                 |    69 -
 cpp/src/gandiva/func_descriptor.h                  |    50 -
 cpp/src/gandiva/function_holder.h                  |    34 -
 cpp/src/gandiva/function_holder_registry.h         |    73 -
 cpp/src/gandiva/function_ir_builder.cc             |    81 -
 cpp/src/gandiva/function_ir_builder.h              |    61 -
 cpp/src/gandiva/function_registry.cc               |    83 -
 cpp/src/gandiva/function_registry.h                |    47 -
 cpp/src/gandiva/function_registry_arithmetic.cc    |   111 -
 cpp/src/gandiva/function_registry_arithmetic.h     |    27 -
 cpp/src/gandiva/function_registry_common.h         |   263 -
 cpp/src/gandiva/function_registry_datetime.cc      |    94 -
 cpp/src/gandiva/function_registry_datetime.h       |    27 -
 cpp/src/gandiva/function_registry_hash.cc          |    63 -
 cpp/src/gandiva/function_registry_hash.h           |    27 -
 cpp/src/gandiva/function_registry_math_ops.cc      |   106 -
 cpp/src/gandiva/function_registry_math_ops.h       |    27 -
 cpp/src/gandiva/function_registry_string.cc        |   254 -
 cpp/src/gandiva/function_registry_string.h         |    27 -
 cpp/src/gandiva/function_registry_test.cc          |    96 -
 .../function_registry_timestamp_arithmetic.cc      |    84 -
 .../function_registry_timestamp_arithmetic.h       |    27 -
 cpp/src/gandiva/function_signature.cc              |   113 -
 cpp/src/gandiva/function_signature.h               |    55 -
 cpp/src/gandiva/function_signature_test.cc         |   113 -
 cpp/src/gandiva/gandiva.pc.in                      |    27 -
 cpp/src/gandiva/gandiva_aliases.h                  |    62 -
 cpp/src/gandiva/gdv_function_stubs.cc              |  1004 -
 cpp/src/gandiva/gdv_function_stubs.h               |   111 -
 cpp/src/gandiva/gdv_function_stubs_test.cc         |   293 -
 cpp/src/gandiva/hash_utils.cc                      |   134 -
 cpp/src/gandiva/hash_utils.h                       |    44 -
 cpp/src/gandiva/hash_utils_test.cc                 |   164 -
 cpp/src/gandiva/in_holder.h                        |    91 -
 cpp/src/gandiva/jni/CMakeLists.txt                 |   109 -
 cpp/src/gandiva/jni/config_builder.cc              |    53 -
 cpp/src/gandiva/jni/config_holder.cc               |    30 -
 cpp/src/gandiva/jni/config_holder.h                |    68 -
 cpp/src/gandiva/jni/env_helper.h                   |    23 -
 cpp/src/gandiva/jni/expression_registry_helper.cc  |   190 -
 cpp/src/gandiva/jni/id_to_module_map.h             |    66 -
 cpp/src/gandiva/jni/jni_common.cc                  |  1039 -
 cpp/src/gandiva/jni/module_holder.h                |    59 -
 cpp/src/gandiva/jni/symbols.map                    |    20 -
 cpp/src/gandiva/like_holder.cc                     |   100 -
 cpp/src/gandiva/like_holder.h                      |    59 -
 cpp/src/gandiva/like_holder_test.cc                |   130 -
 cpp/src/gandiva/literal_holder.cc                  |    45 -
 cpp/src/gandiva/literal_holder.h                   |    36 -
 cpp/src/gandiva/llvm_generator.cc                  |  1392 -
 cpp/src/gandiva/llvm_generator.h                   |   251 -
 cpp/src/gandiva/llvm_generator_test.cc             |   116 -
 cpp/src/gandiva/llvm_includes.h                    |    43 -
 cpp/src/gandiva/llvm_types.cc                      |    48 -
 cpp/src/gandiva/llvm_types.h                       |   130 -
 cpp/src/gandiva/llvm_types_test.cc                 |    61 -
 cpp/src/gandiva/local_bitmaps_holder.h             |    85 -
 cpp/src/gandiva/lru_cache.h                        |   121 -
 cpp/src/gandiva/lru_cache_test.cc                  |    64 -
 cpp/src/gandiva/lvalue.h                           |    77 -
 cpp/src/gandiva/make_precompiled_bitcode.py        |    49 -
 cpp/src/gandiva/native_function.h                  |    81 -
 cpp/src/gandiva/node.h                             |   299 -
 cpp/src/gandiva/node_visitor.h                     |    53 -
 cpp/src/gandiva/pch.h                              |    24 -
 cpp/src/gandiva/precompiled/CMakeLists.txt         |   143 -
 cpp/src/gandiva/precompiled/arithmetic_ops.cc      |   259 -
 cpp/src/gandiva/precompiled/arithmetic_ops_test.cc |   140 -
 cpp/src/gandiva/precompiled/bitmap.cc              |    60 -
 cpp/src/gandiva/precompiled/bitmap_test.cc         |    62 -
 cpp/src/gandiva/precompiled/decimal_ops.cc         |   723 -
 cpp/src/gandiva/precompiled/decimal_ops.h          |    90 -
 cpp/src/gandiva/precompiled/decimal_ops_test.cc    |  1095 -
 cpp/src/gandiva/precompiled/decimal_wrapper.cc     |   433 -
 cpp/src/gandiva/precompiled/epoch_time_point.h     |   104 -
 .../gandiva/precompiled/epoch_time_point_test.cc   |   103 -
 cpp/src/gandiva/precompiled/extended_math_ops.cc   |   370 -
 .../gandiva/precompiled/extended_math_ops_test.cc  |   276 -
 cpp/src/gandiva/precompiled/hash.cc                |   407 -
 cpp/src/gandiva/precompiled/hash_test.cc           |   122 -
 cpp/src/gandiva/precompiled/print.cc               |    28 -
 cpp/src/gandiva/precompiled/string_ops.cc          |  1523 -
 cpp/src/gandiva/precompiled/string_ops_test.cc     |  1091 -
 cpp/src/gandiva/precompiled/testing.h              |    43 -
 cpp/src/gandiva/precompiled/time.cc                |   831 -
 cpp/src/gandiva/precompiled/time_constants.h       |    30 -
 cpp/src/gandiva/precompiled/time_fields.h          |    35 -
 cpp/src/gandiva/precompiled/time_test.cc           |   746 -
 .../gandiva/precompiled/timestamp_arithmetic.cc    |   242 -
 cpp/src/gandiva/precompiled/types.h                |   457 -
 cpp/src/gandiva/precompiled_bitcode.cc.in          |    26 -
 cpp/src/gandiva/projector.cc                       |   362 -
 cpp/src/gandiva/projector.h                        |   143 -
 cpp/src/gandiva/proto/Types.proto                  |   245 -
 cpp/src/gandiva/random_generator_holder.cc         |    45 -
 cpp/src/gandiva/random_generator_holder.h          |    57 -
 cpp/src/gandiva/random_generator_holder_test.cc    |   103 -
 cpp/src/gandiva/regex_util.cc                      |    63 -
 cpp/src/gandiva/regex_util.h                       |    45 -
 cpp/src/gandiva/selection_vector.cc                |   179 -
 cpp/src/gandiva/selection_vector.h                 |   151 -
 cpp/src/gandiva/selection_vector_impl.h            |   108 -
 cpp/src/gandiva/selection_vector_test.cc           |   270 -
 cpp/src/gandiva/simple_arena.h                     |   160 -
 cpp/src/gandiva/simple_arena_test.cc               |   102 -
 cpp/src/gandiva/symbols.map                        |    35 -
 cpp/src/gandiva/tests/CMakeLists.txt               |    42 -
 cpp/src/gandiva/tests/binary_test.cc               |    89 -
 cpp/src/gandiva/tests/boolean_expr_test.cc         |   388 -
 cpp/src/gandiva/tests/date_time_test.cc            |   590 -
 cpp/src/gandiva/tests/decimal_single_test.cc       |   305 -
 cpp/src/gandiva/tests/decimal_test.cc              |  1194 -
 cpp/src/gandiva/tests/filter_project_test.cc       |   276 -
 cpp/src/gandiva/tests/filter_test.cc               |   340 -
 cpp/src/gandiva/tests/generate_data.h              |   152 -
 cpp/src/gandiva/tests/hash_test.cc                 |   431 -
 cpp/src/gandiva/tests/huge_table_test.cc           |   157 -
 cpp/src/gandiva/tests/if_expr_test.cc              |   378 -
 cpp/src/gandiva/tests/in_expr_test.cc              |   196 -
 cpp/src/gandiva/tests/literal_test.cc              |   232 -
 cpp/src/gandiva/tests/micro_benchmarks.cc          |   456 -
 cpp/src/gandiva/tests/null_validity_test.cc        |   175 -
 .../tests/projector_build_validation_test.cc       |   287 -
 cpp/src/gandiva/tests/projector_test.cc            |  1013 -
 cpp/src/gandiva/tests/test_util.h                  |   103 -
 cpp/src/gandiva/tests/timed_evaluate.h             |   136 -
 cpp/src/gandiva/tests/to_string_test.cc            |    88 -
 cpp/src/gandiva/tests/utf8_test.cc                 |   640 -
 cpp/src/gandiva/to_date_holder.cc                  |   116 -
 cpp/src/gandiva/to_date_holder.h                   |    58 -
 cpp/src/gandiva/to_date_holder_test.cc             |   152 -
 cpp/src/gandiva/tree_expr_builder.cc               |   221 -
 cpp/src/gandiva/tree_expr_builder.h                |   130 -
 cpp/src/gandiva/tree_expr_test.cc                  |   159 -
 cpp/src/gandiva/value_validity_pair.h              |    48 -
 cpp/src/gandiva/visibility.h                       |    48 -
 cpp/src/generated/File_generated.h                 |   200 -
 cpp/src/generated/Message_generated.h              |   659 -
 cpp/src/generated/Schema_generated.h               |  2265 --
 cpp/src/generated/SparseTensor_generated.h         |   913 -
 cpp/src/generated/Tensor_generated.h               |   387 -
 cpp/src/generated/feather_generated.h              |   863 -
 cpp/src/generated/parquet_constants.cpp            |    17 -
 cpp/src/generated/parquet_constants.h              |    24 -
 cpp/src/generated/parquet_types.cpp                |  7411 -----
 cpp/src/generated/parquet_types.h                  |  2916 --
 cpp/src/jni/CMakeLists.txt                         |    27 -
 cpp/src/jni/dataset/CMakeLists.txt                 |    65 -
 cpp/src/jni/dataset/jni_util.cc                    |   242 -
 cpp/src/jni/dataset/jni_util.h                     |   135 -
 cpp/src/jni/dataset/jni_util_test.cc               |   134 -
 cpp/src/jni/dataset/jni_wrapper.cc                 |   546 -
 cpp/src/jni/orc/CMakeLists.txt                     |    53 -
 cpp/src/jni/orc/concurrent_map.h                   |    77 -
 cpp/src/jni/orc/jni_wrapper.cpp                    |   311 -
 cpp/src/parquet/CMakeLists.txt                     |   425 -
 cpp/src/parquet/ParquetConfig.cmake.in             |    43 -
 cpp/src/parquet/README                             |    10 -
 cpp/src/parquet/api/CMakeLists.txt                 |    19 -
 cpp/src/parquet/api/io.h                           |    20 -
 cpp/src/parquet/api/reader.h                       |    35 -
 cpp/src/parquet/api/schema.h                       |    21 -
 cpp/src/parquet/api/writer.h                       |    25 -
 cpp/src/parquet/arrow/CMakeLists.txt               |    31 -
 cpp/src/parquet/arrow/arrow_reader_writer_test.cc  |  3945 ---
 cpp/src/parquet/arrow/arrow_schema_test.cc         |  1563 -
 cpp/src/parquet/arrow/fuzz.cc                      |    25 -
 cpp/src/parquet/arrow/generate_fuzz_corpus.cc      |   198 -
 cpp/src/parquet/arrow/path_internal.cc             |   900 -
 cpp/src/parquet/arrow/path_internal.h              |   155 -
 cpp/src/parquet/arrow/path_internal_test.cc        |   597 -
 cpp/src/parquet/arrow/reader.cc                    |  1123 -
 cpp/src/parquet/arrow/reader.h                     |   324 -
 cpp/src/parquet/arrow/reader_internal.cc           |   778 -
 cpp/src/parquet/arrow/reader_internal.h            |   122 -
 cpp/src/parquet/arrow/reader_writer_benchmark.cc   |   564 -
 cpp/src/parquet/arrow/reconstruct_internal_test.cc |  1639 -
 cpp/src/parquet/arrow/schema.cc                    |  1053 -
 cpp/src/parquet/arrow/schema.h                     |   184 -
 cpp/src/parquet/arrow/schema_internal.cc           |   225 -
 cpp/src/parquet/arrow/schema_internal.h            |    48 -
 cpp/src/parquet/arrow/test_util.h                  |   512 -
 cpp/src/parquet/arrow/writer.cc                    |   484 -
 cpp/src/parquet/arrow/writer.h                     |   105 -
 cpp/src/parquet/bloom_filter.cc                    |   162 -
 cpp/src/parquet/bloom_filter.h                     |   247 -
 cpp/src/parquet/bloom_filter_test.cc               |   247 -
 cpp/src/parquet/column_io_benchmark.cc             |   261 -
 cpp/src/parquet/column_page.h                      |   160 -
 cpp/src/parquet/column_reader.cc                   |  1726 -
 cpp/src/parquet/column_reader.h                    |   333 -
 cpp/src/parquet/column_reader_test.cc              |   390 -
 cpp/src/parquet/column_scanner.cc                  |    91 -
 cpp/src/parquet/column_scanner.h                   |   262 -
 cpp/src/parquet/column_scanner_test.cc             |   235 -
 cpp/src/parquet/column_writer.cc                   |  2067 --
 cpp/src/parquet/column_writer.h                    |   270 -
 cpp/src/parquet/column_writer_test.cc              |  1019 -
 cpp/src/parquet/encoding.cc                        |  2527 --
 cpp/src/parquet/encoding.h                         |   442 -
 cpp/src/parquet/encoding_benchmark.cc              |   802 -
 cpp/src/parquet/encoding_test.cc                   |  1247 -
 cpp/src/parquet/encryption/CMakeLists.txt          |    19 -
 cpp/src/parquet/encryption/crypto_factory.cc       |   175 -
 cpp/src/parquet/encryption/crypto_factory.h        |   135 -
 cpp/src/parquet/encryption/encryption.cc           |   412 -
 cpp/src/parquet/encryption/encryption.h            |   510 -
 cpp/src/parquet/encryption/encryption_internal.cc  |   613 -
 cpp/src/parquet/encryption/encryption_internal.h   |   116 -
 .../encryption/encryption_internal_nossl.cc        |   110 -
 .../parquet/encryption/file_key_material_store.h   |    31 -
 cpp/src/parquet/encryption/file_key_unwrapper.cc   |   114 -
 cpp/src/parquet/encryption/file_key_unwrapper.h    |    66 -
 cpp/src/parquet/encryption/file_key_wrapper.cc     |   109 -
 cpp/src/parquet/encryption/file_key_wrapper.h      |    82 -
 .../parquet/encryption/internal_file_decryptor.cc  |   240 -
 .../parquet/encryption/internal_file_decryptor.h   |   121 -
 .../parquet/encryption/internal_file_encryptor.cc  |   170 -
 .../parquet/encryption/internal_file_encryptor.h   |   109 -
 cpp/src/parquet/encryption/key_encryption_key.h    |    61 -
 cpp/src/parquet/encryption/key_management_test.cc  |   225 -
 cpp/src/parquet/encryption/key_material.cc         |   159 -
 cpp/src/parquet/encryption/key_material.h          |   131 -
 cpp/src/parquet/encryption/key_metadata.cc         |    89 -
 cpp/src/parquet/encryption/key_metadata.h          |    94 -
 cpp/src/parquet/encryption/key_metadata_test.cc    |    77 -
 cpp/src/parquet/encryption/key_toolkit.cc          |    52 -
 cpp/src/parquet/encryption/key_toolkit.h           |    76 -
 cpp/src/parquet/encryption/key_toolkit_internal.cc |    80 -
 cpp/src/parquet/encryption/key_toolkit_internal.h  |    58 -
 cpp/src/parquet/encryption/key_wrapping_test.cc    |   103 -
 cpp/src/parquet/encryption/kms_client.cc           |    44 -
 cpp/src/parquet/encryption/kms_client.h            |    95 -
 cpp/src/parquet/encryption/kms_client_factory.h    |    40 -
 .../parquet/encryption/local_wrap_kms_client.cc    |   116 -
 cpp/src/parquet/encryption/local_wrap_kms_client.h |    96 -
 cpp/src/parquet/encryption/properties_test.cc      |   276 -
 .../parquet/encryption/read_configurations_test.cc |   272 -
 cpp/src/parquet/encryption/test_encryption_util.cc |   482 -
 cpp/src/parquet/encryption/test_encryption_util.h  |   113 -
 cpp/src/parquet/encryption/test_in_memory_kms.cc   |    81 -
 cpp/src/parquet/encryption/test_in_memory_kms.h    |    89 -
 .../encryption/two_level_cache_with_expiration.h   |   159 -
 .../two_level_cache_with_expiration_test.cc        |   177 -
 .../encryption/write_configurations_test.cc        |   234 -
 cpp/src/parquet/exception.cc                       |    27 -
 cpp/src/parquet/exception.h                        |   159 -
 cpp/src/parquet/file_deserialize_test.cc           |   372 -
 cpp/src/parquet/file_reader.cc                     |   665 -
 cpp/src/parquet/file_reader.h                      |   151 -
 cpp/src/parquet/file_serialize_test.cc             |   471 -
 cpp/src/parquet/file_writer.cc                     |   547 -
 cpp/src/parquet/file_writer.h                      |   234 -
 cpp/src/parquet/hasher.h                           |    72 -
 cpp/src/parquet/level_comparison.cc                |    82 -
 cpp/src/parquet/level_comparison.h                 |    40 -
 cpp/src/parquet/level_comparison_avx2.cc           |    34 -
 cpp/src/parquet/level_comparison_inc.h             |    65 -
 cpp/src/parquet/level_conversion.cc                |   183 -
 cpp/src/parquet/level_conversion.h                 |   199 -
 cpp/src/parquet/level_conversion_benchmark.cc      |    80 -
 cpp/src/parquet/level_conversion_bmi2.cc           |    33 -
 cpp/src/parquet/level_conversion_inc.h             |   357 -
 cpp/src/parquet/level_conversion_test.cc           |   361 -
 cpp/src/parquet/metadata.cc                        |  1783 -
 cpp/src/parquet/metadata.h                         |   484 -
 cpp/src/parquet/metadata_test.cc                   |   558 -
 cpp/src/parquet/murmur3.cc                         |   222 -
 cpp/src/parquet/murmur3.h                          |    54 -
 cpp/src/parquet/parquet.pc.in                      |    31 -
 cpp/src/parquet/parquet.thrift                     |  1058 -
 cpp/src/parquet/parquet_version.h.in               |    31 -
 cpp/src/parquet/pch.h                              |    28 -
 cpp/src/parquet/platform.cc                        |    41 -
 cpp/src/parquet/platform.h                         |   111 -
 cpp/src/parquet/printer.cc                         |   297 -
 cpp/src/parquet/printer.h                          |    46 -
 cpp/src/parquet/properties.cc                      |    64 -
 cpp/src/parquet/properties.h                       |   801 -
 cpp/src/parquet/properties_test.cc                 |    90 -
 cpp/src/parquet/public_api_test.cc                 |    49 -
 cpp/src/parquet/reader_test.cc                     |   629 -
 cpp/src/parquet/schema.cc                          |   946 -
 cpp/src/parquet/schema.h                           |   496 -
 cpp/src/parquet/schema_internal.h                  |    54 -
 cpp/src/parquet/schema_test.cc                     |  2226 --
 cpp/src/parquet/statistics.cc                      |   848 -
 cpp/src/parquet/statistics.h                       |   342 -
 cpp/src/parquet/statistics_test.cc                 |  1140 -
 cpp/src/parquet/stream_reader.cc                   |   521 -
 cpp/src/parquet/stream_reader.h                    |   299 -
 cpp/src/parquet/stream_reader_test.cc              |   916 -
 cpp/src/parquet/stream_writer.cc                   |   324 -
 cpp/src/parquet/stream_writer.h                    |   243 -
 cpp/src/parquet/stream_writer_test.cc              |   419 -
 cpp/src/parquet/symbols.map                        |    40 -
 cpp/src/parquet/test_util.cc                       |   136 -
 cpp/src/parquet/test_util.h                        |   709 -
 cpp/src/parquet/thrift_internal.h                  |   490 -
 cpp/src/parquet/type_fwd.h                         |    43 -
 cpp/src/parquet/types.cc                           |  1572 -
 cpp/src/parquet/types.h                            |   716 -
 cpp/src/parquet/types_test.cc                      |   172 -
 cpp/src/parquet/windows_compatibility.h            |    30 -
 cpp/src/plasma/.gitignore                          |    18 -
 cpp/src/plasma/CMakeLists.txt                      |   238 -
 cpp/src/plasma/PlasmaConfig.cmake.in               |    39 -
 cpp/src/plasma/client.cc                           |  1224 -
 cpp/src/plasma/client.h                            |   309 -
 cpp/src/plasma/common.cc                           |   195 -
 cpp/src/plasma/common.fbs                          |    39 -
 cpp/src/plasma/common.h                            |   155 -
 cpp/src/plasma/common_generated.h                  |   230 -
 cpp/src/plasma/compat.h                            |    32 -
 cpp/src/plasma/dlmalloc.cc                         |   166 -
 cpp/src/plasma/events.cc                           |   107 -
 cpp/src/plasma/events.h                            |   108 -
 cpp/src/plasma/eviction_policy.cc                  |   175 -
 cpp/src/plasma/eviction_policy.h                   |   209 -
 cpp/src/plasma/external_store.cc                   |    63 -
 cpp/src/plasma/external_store.h                    |   120 -
 cpp/src/plasma/fling.cc                            |   129 -
 cpp/src/plasma/fling.h                             |    52 -
 cpp/src/plasma/hash_table_store.cc                 |    58 -
 cpp/src/plasma/hash_table_store.h                  |    50 -
 cpp/src/plasma/io.cc                               |   250 -
 cpp/src/plasma/io.h                                |    67 -
 .../org_apache_arrow_plasma_PlasmaClientJNI.cc     |   263 -
 .../java/org_apache_arrow_plasma_PlasmaClientJNI.h |   141 -
 cpp/src/plasma/malloc.cc                           |    70 -
 cpp/src/plasma/malloc.h                            |    51 -
 cpp/src/plasma/plasma.cc                           |    99 -
 cpp/src/plasma/plasma.fbs                          |   357 -
 cpp/src/plasma/plasma.h                            |   175 -
 cpp/src/plasma/plasma.pc.in                        |    33 -
 cpp/src/plasma/plasma_allocator.cc                 |    56 -
 cpp/src/plasma/plasma_allocator.h                  |    61 -
 cpp/src/plasma/plasma_generated.h                  |  3984 ---
 cpp/src/plasma/protocol.cc                         |   829 -
 cpp/src/plasma/protocol.h                          |   251 -
 cpp/src/plasma/quota_aware_policy.cc               |   177 -
 cpp/src/plasma/quota_aware_policy.h                |    88 -
 cpp/src/plasma/store.cc                            |  1353 -
 cpp/src/plasma/store.h                             |   245 -
 cpp/src/plasma/symbols.map                         |    34 -
 cpp/src/plasma/test/client_tests.cc                |  1084 -
 cpp/src/plasma/test/external_store_tests.cc        |   143 -
 cpp/src/plasma/test/serialization_tests.cc         |   333 -
 cpp/src/plasma/test_util.h                         |    46 -
 cpp/src/plasma/thirdparty/ae/ae.c                  |   465 -
 cpp/src/plasma/thirdparty/ae/ae.h                  |   121 -
 cpp/src/plasma/thirdparty/ae/ae_epoll.c            |   137 -
 cpp/src/plasma/thirdparty/ae/ae_evport.c           |   320 -
 cpp/src/plasma/thirdparty/ae/ae_kqueue.c           |   138 -
 cpp/src/plasma/thirdparty/ae/ae_select.c           |   106 -
 cpp/src/plasma/thirdparty/ae/config.h              |    52 -
 cpp/src/plasma/thirdparty/ae/zmalloc.h             |    43 -
 cpp/src/plasma/thirdparty/dlmalloc.c               |  6296 ----
 cpp/submodules/parquet-testing                     |     1 -
 cpp/thirdparty/README.md                           |    25 -
 cpp/thirdparty/download_dependencies.sh            |    63 -
 .../flatbuffers/include/flatbuffers/base.h         |   398 -
 .../flatbuffers/include/flatbuffers/flatbuffers.h  |  2783 --
 .../include/flatbuffers/stl_emulation.h            |   307 -
 cpp/thirdparty/hadoop/include/hdfs.h               |  1024 -
 cpp/thirdparty/versions.txt                        |    92 -
 cpp/tools/parquet/CMakeLists.txt                   |    36 -
 cpp/tools/parquet/parquet_dump_schema.cc           |    52 -
 cpp/tools/parquet/parquet_reader.cc                |    82 -
 cpp/tools/parquet/parquet_scan.cc                  |    78 -
 cpp/valgrind.supp                                  |    53 -
 cpp/vcpkg.json                                     |    40 -
 csharp/.editorconfig                               |   169 -
 csharp/.gitattributes                              |    36 -
 csharp/.gitignore                                  |   267 -
 csharp/Apache.Arrow.sln                            |    61 -
 csharp/ApacheArrow.snk                             |   Bin 596 -> 0 bytes
 csharp/Directory.Build.props                       |    59 -
 csharp/Directory.Build.targets                     |    29 -
 csharp/README.md                                   |   184 -
 csharp/examples/Examples.sln                       |    31 -
 .../FluentBuilderExample.csproj                    |    12 -
 csharp/examples/FluentBuilderExample/Program.cs    |    61 -
 .../Apache.Arrow.Flight.AspNetCore.csproj          |    15 -
 .../FlightIEndpointRouteBuilderExtensions.cs       |    28 -
 .../FlightIGrpcServerBuilderExtensions.cs          |    30 -
 .../Apache.Arrow.Flight/Apache.Arrow.Flight.csproj |    21 -
 .../src/Apache.Arrow.Flight/Client/FlightClient.cs |   120 -
 .../Client/FlightClientRecordBatchStreamReader.cs  |    28 -
 .../Client/FlightClientRecordBatchStreamWriter.cs  |    56 -
 .../Client/FlightRecordBatchDuplexStreamingCall.cs |    93 -
 .../Client/FlightRecordBatchStreamingCall.cs       |    83 -
 csharp/src/Apache.Arrow.Flight/FlightAction.cs     |    75 -
 csharp/src/Apache.Arrow.Flight/FlightActionType.cs |    61 -
 csharp/src/Apache.Arrow.Flight/FlightCriteria.cs   |    70 -
 csharp/src/Apache.Arrow.Flight/FlightDescriptor.cs |   102 -
 .../Apache.Arrow.Flight/FlightDescriptorType.cs    |    23 -
 csharp/src/Apache.Arrow.Flight/FlightEndpoint.cs   |    73 -
 csharp/src/Apache.Arrow.Flight/FlightInfo.cs       |    78 -
 csharp/src/Apache.Arrow.Flight/FlightLocation.cs   |    59 -
 csharp/src/Apache.Arrow.Flight/FlightPutResult.cs  |    64 -
 .../FlightRecordBatchStreamReader.cs               |   104 -
 .../FlightRecordBatchStreamWriter.cs               |    77 -
 csharp/src/Apache.Arrow.Flight/FlightResult.cs     |    71 -
 csharp/src/Apache.Arrow.Flight/FlightTicket.cs     |    70 -
 .../Internal/FlightDataStream.cs                   |   109 -
 .../Internal/FlightMessageSerializer.cs            |    57 -
 .../Internal/RecordBatcReaderImplementation.cs     |   131 -
 .../Apache.Arrow.Flight/Internal/SchemaWriter.cs   |    55 -
 .../Apache.Arrow.Flight/Internal/StreamReader.cs   |    54 -
 .../Apache.Arrow.Flight/Internal/StreamWriter.cs   |    51 -
 .../Apache.Arrow.Flight/Properties/AssemblyInfo.cs |    18 -
 .../src/Apache.Arrow.Flight/Server/FlightServer.cs |    61 -
 .../Server/FlightServerRecordBatchStreamReader.cs  |    31 -
 .../Server/FlightServerRecordBatchStreamWriter.cs  |    31 -
 .../Server/Internal/FlightServerImplementation.cs  |   100 -
 csharp/src/Apache.Arrow/Apache.Arrow.csproj        |    42 -
 csharp/src/Apache.Arrow/Arrays/Array.cs            |    91 -
 csharp/src/Apache.Arrow/Arrays/ArrayData.cs        |    92 -
 .../Arrays/ArrowArrayBuilderFactory.cs             |    79 -
 .../src/Apache.Arrow/Arrays/ArrowArrayFactory.cs   |    81 -
 .../src/Apache.Arrow/Arrays/ArrowArrayVisitor.cs   |    22 -
 csharp/src/Apache.Arrow/Arrays/BinaryArray.cs      |   358 -
 csharp/src/Apache.Arrow/Arrays/BooleanArray.cs     |   194 -
 csharp/src/Apache.Arrow/Arrays/Date32Array.cs      |   112 -
 csharp/src/Apache.Arrow/Arrays/Date64Array.cs      |   117 -
 csharp/src/Apache.Arrow/Arrays/DateArrayBuilder.cs |   209 -
 csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs  |    95 -
 csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs  |    96 -
 .../Apache.Arrow/Arrays/DelegatingArrayBuilder.cs  |   102 -
 csharp/src/Apache.Arrow/Arrays/DoubleArray.cs      |    45 -
 .../Apache.Arrow/Arrays/FixedSizeBinaryArray.cs    |   196 -
 csharp/src/Apache.Arrow/Arrays/FloatArray.cs       |    45 -
 csharp/src/Apache.Arrow/Arrays/Int16Array.cs       |    46 -
 csharp/src/Apache.Arrow/Arrays/Int32Array.cs       |    46 -
 csharp/src/Apache.Arrow/Arrays/Int64Array.cs       |    46 -
 csharp/src/Apache.Arrow/Arrays/Int8Array.cs        |    46 -
 csharp/src/Apache.Arrow/Arrays/ListArray.cs        |   200 -
 csharp/src/Apache.Arrow/Arrays/PrimitiveArray.cs   |    70 -
 .../Apache.Arrow/Arrays/PrimitiveArrayBuilder.cs   |   201 -
 csharp/src/Apache.Arrow/Arrays/StringArray.cs      |    95 -
 csharp/src/Apache.Arrow/Arrays/StructArray.cs      |    59 -
 csharp/src/Apache.Arrow/Arrays/TimestampArray.cs   |   149 -
 csharp/src/Apache.Arrow/Arrays/UInt16Array.cs      |    46 -
 csharp/src/Apache.Arrow/Arrays/UInt32Array.cs      |    46 -
 csharp/src/Apache.Arrow/Arrays/UInt64Array.cs      |    46 -
 csharp/src/Apache.Arrow/Arrays/UInt8Array.cs       |    45 -
 csharp/src/Apache.Arrow/Arrays/UnionArray.cs       |    51 -
 .../src/Apache.Arrow/ArrowBuffer.BitmapBuilder.cs  |   280 -
 csharp/src/Apache.Arrow/ArrowBuffer.Builder.cs     |   255 -
 csharp/src/Apache.Arrow/ArrowBuffer.cs             |    76 -
 csharp/src/Apache.Arrow/BitUtility.cs              |   204 -
 csharp/src/Apache.Arrow/ChunkedArray.cs            |    91 -
 csharp/src/Apache.Arrow/Column.cs                  |    73 -
 csharp/src/Apache.Arrow/DecimalUtility.cs          |   162 -
 .../Apache.Arrow/Extensions/ArrayDataExtensions.cs |    45 -
 .../Apache.Arrow/Extensions/ArrayPoolExtensions.cs |    63 -
 .../Apache.Arrow/Extensions/ArrowTypeExtensions.cs |    42 -
 .../Apache.Arrow/Extensions/FlatbufExtensions.cs   |    85 -
 .../src/Apache.Arrow/Extensions/SpanExtensions.cs  |    31 -
 .../Apache.Arrow/Extensions/StreamExtensions.cs    |    70 -
 .../Extensions/StreamExtensions.netcoreapp2.1.cs   |    34 -
 .../Extensions/StreamExtensions.netstandard.cs     |   124 -
 .../Apache.Arrow/Extensions/TimeSpanExtensions.cs  |    35 -
 .../Extensions/TupleExtensions.netstandard.cs      |    29 -
 csharp/src/Apache.Arrow/Field.Builder.cs           |    93 -
 csharp/src/Apache.Arrow/Field.cs                   |    65 -
 csharp/src/Apache.Arrow/Flatbuf/Block.cs           |    37 -
 csharp/src/Apache.Arrow/Flatbuf/BodyCompression.cs |    47 -
 csharp/src/Apache.Arrow/Flatbuf/Buffer.cs          |    36 -
 csharp/src/Apache.Arrow/Flatbuf/DictionaryBatch.cs |    54 -
 .../src/Apache.Arrow/Flatbuf/DictionaryEncoding.cs |    57 -
 .../Flatbuf/Enums/BodyCompressionMethod.cs         |    24 -
 .../Apache.Arrow/Flatbuf/Enums/CompressionType.cs  |    15 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/DateUnit.cs  |    15 -
 .../src/Apache.Arrow/Flatbuf/Enums/Endianness.cs   |    17 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/Feature.cs   |    39 -
 .../src/Apache.Arrow/Flatbuf/Enums/IntervalUnit.cs |    15 -
 .../Apache.Arrow/Flatbuf/Enums/MessageHeader.cs    |    26 -
 .../Apache.Arrow/Flatbuf/Enums/MetadataVersion.cs  |    29 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/Precision.cs |    16 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/TimeUnit.cs  |    17 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/Type.cs      |    38 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/UnionMode.cs |    15 -
 csharp/src/Apache.Arrow/Flatbuf/Field.cs           |    83 -
 csharp/src/Apache.Arrow/Flatbuf/FieldNode.cs       |    44 -
 csharp/src/Apache.Arrow/Flatbuf/FixedSizeBinary.cs |    39 -
 csharp/src/Apache.Arrow/Flatbuf/FixedSizeList.cs   |    39 -
 .../Apache.Arrow/Flatbuf/FlatBuffers/ByteBuffer.cs |   891 -
 .../Flatbuf/FlatBuffers/ByteBufferUtil.cs          |    39 -
 .../Flatbuf/FlatBuffers/FlatBufferBuilder.cs       |   812 -
 .../Flatbuf/FlatBuffers/FlatBufferConstants.cs     |    29 -
 .../Flatbuf/FlatBuffers/IFlatbufferObject.cs       |    28 -
 .../src/Apache.Arrow/Flatbuf/FlatBuffers/Offset.cs |    48 -
 .../src/Apache.Arrow/Flatbuf/FlatBuffers/Struct.cs |    27 -
 .../src/Apache.Arrow/Flatbuf/FlatBuffers/Table.cs  |   195 -
 csharp/src/Apache.Arrow/Flatbuf/Footer.cs          |    68 -
 csharp/src/Apache.Arrow/Flatbuf/KeyValue.cs        |    57 -
 csharp/src/Apache.Arrow/Flatbuf/Map.cs             |    63 -
 csharp/src/Apache.Arrow/Flatbuf/Message.cs         |    60 -
 csharp/src/Apache.Arrow/Flatbuf/RecordBatch.cs     |    67 -
 csharp/src/Apache.Arrow/Flatbuf/Schema.cs          |    76 -
 csharp/src/Apache.Arrow/Flatbuf/Tensor.cs          |    60 -
 csharp/src/Apache.Arrow/Flatbuf/TensorDim.cs       |    53 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Binary.cs    |    29 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Bool.cs      |    29 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Date.cs      |    44 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Decimal.cs   |    54 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Duration.cs  |    38 -
 .../Apache.Arrow/Flatbuf/Types/FloatingPoint.cs    |    38 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Int.cs       |    42 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Interval.cs  |    38 -
 .../src/Apache.Arrow/Flatbuf/Types/LargeBinary.cs  |    31 -
 csharp/src/Apache.Arrow/Flatbuf/Types/LargeList.cs |    31 -
 csharp/src/Apache.Arrow/Flatbuf/Types/LargeUtf8.cs |    31 -
 csharp/src/Apache.Arrow/Flatbuf/Types/List.cs      |    29 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Null.cs      |    30 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Struct_.cs   |    32 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Time.cs      |    45 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Timestamp.cs |    74 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Union.cs     |    56 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Utf8.cs      |    30 -
 csharp/src/Apache.Arrow/Interfaces/IArrowArray.cs  |    40 -
 .../Apache.Arrow/Interfaces/IArrowArrayBuilder.cs  |    54 -
 .../Apache.Arrow/Interfaces/IArrowArrayVisitor.cs  |    30 -
 csharp/src/Apache.Arrow/Ipc/ArrowFileConstants.cs  |    24 -
 csharp/src/Apache.Arrow/Ipc/ArrowFileReader.cs     |    69 -
 .../Ipc/ArrowFileReaderImplementation.cs           |   308 -
 csharp/src/Apache.Arrow/Ipc/ArrowFileWriter.cs     |   281 -
 csharp/src/Apache.Arrow/Ipc/ArrowFooter.cs         |    96 -
 .../Ipc/ArrowMemoryReaderImplementation.cs         |   118 -
 .../Apache.Arrow/Ipc/ArrowReaderImplementation.cs  |   290 -
 csharp/src/Apache.Arrow/Ipc/ArrowStreamReader.cs   |    90 -
 .../Ipc/ArrowStreamReaderImplementation.cs         |   262 -
 csharp/src/Apache.Arrow/Ipc/ArrowStreamWriter.cs   |   726 -
 .../Apache.Arrow/Ipc/ArrowTypeFlatbufferBuilder.cs |   249 -
 csharp/src/Apache.Arrow/Ipc/Block.cs               |    40 -
 csharp/src/Apache.Arrow/Ipc/IArrowReader.cs        |    26 -
 csharp/src/Apache.Arrow/Ipc/IpcOptions.cs          |    37 -
 csharp/src/Apache.Arrow/Ipc/MessageSerializer.cs   |   177 -
 .../Ipc/ReadOnlyMemoryBufferAllocator.cs           |    39 -
 csharp/src/Apache.Arrow/Memory/MemoryAllocator.cs  |    81 -
 .../Apache.Arrow/Memory/NativeMemoryAllocator.cs   |    51 -
 .../src/Apache.Arrow/Memory/NativeMemoryManager.cs |    83 -
 csharp/src/Apache.Arrow/Memory/NullMemoryOwner.cs  |    29 -
 csharp/src/Apache.Arrow/Properties/AssembyInfo.cs  |    18 -
 .../Apache.Arrow/Properties/Resources.Designer.cs  |    73 -
 csharp/src/Apache.Arrow/Properties/Resources.resx  |   123 -
 csharp/src/Apache.Arrow/RecordBatch.Builder.cs     |   167 -
 csharp/src/Apache.Arrow/RecordBatch.cs             |    88 -
 csharp/src/Apache.Arrow/Schema.Builder.cs          |    92 -
 csharp/src/Apache.Arrow/Schema.cs                  |   125 -
 csharp/src/Apache.Arrow/Table.cs                   |   113 -
 csharp/src/Apache.Arrow/Types/ArrowType.cs         |    43 -
 csharp/src/Apache.Arrow/Types/BinaryType.cs        |    28 -
 csharp/src/Apache.Arrow/Types/BooleanType.cs       |    30 -
 csharp/src/Apache.Arrow/Types/Date32Type.cs        |    30 -
 csharp/src/Apache.Arrow/Types/Date64Type.cs        |    30 -
 csharp/src/Apache.Arrow/Types/DateType.cs          |    29 -
 csharp/src/Apache.Arrow/Types/Decimal128Type.cs    |    35 -
 csharp/src/Apache.Arrow/Types/Decimal256Type.cs    |    35 -
 csharp/src/Apache.Arrow/Types/DoubleType.cs        |    31 -
 .../src/Apache.Arrow/Types/FixedSizeBinaryType.cs  |    38 -
 csharp/src/Apache.Arrow/Types/FixedWidthType.cs    |    25 -
 csharp/src/Apache.Arrow/Types/FloatType.cs         |    31 -
 csharp/src/Apache.Arrow/Types/FloatingPointType.cs |    30 -
 csharp/src/Apache.Arrow/Types/HalfFloatType.cs     |    31 -
 csharp/src/Apache.Arrow/Types/IArrowType.cs        |    63 -
 csharp/src/Apache.Arrow/Types/IArrowTypeVisitor.cs |    29 -
 csharp/src/Apache.Arrow/Types/Int16Type.cs         |    29 -
 csharp/src/Apache.Arrow/Types/Int32Type.cs         |    29 -
 csharp/src/Apache.Arrow/Types/Int64Type.cs         |    29 -
 csharp/src/Apache.Arrow/Types/Int8Type.cs          |    30 -
 csharp/src/Apache.Arrow/Types/IntervalUnit.cs      |    40 -
 csharp/src/Apache.Arrow/Types/ListType.cs          |    37 -
 csharp/src/Apache.Arrow/Types/NestedType.cs        |    46 -
 csharp/src/Apache.Arrow/Types/NullType.cs          |    28 -
 csharp/src/Apache.Arrow/Types/NumberType.cs        |    23 -
 csharp/src/Apache.Arrow/Types/StringType.cs        |    28 -
 csharp/src/Apache.Arrow/Types/StructType.cs        |    61 -
 csharp/src/Apache.Arrow/Types/Time32Type.cs        |    32 -
 csharp/src/Apache.Arrow/Types/Time64Type.cs        |    32 -
 csharp/src/Apache.Arrow/Types/TimeType.cs          |    36 -
 csharp/src/Apache.Arrow/Types/TimestampType.cs     |    52 -
 csharp/src/Apache.Arrow/Types/UInt16Type.cs        |    29 -
 csharp/src/Apache.Arrow/Types/UInt32Type.cs        |    29 -
 csharp/src/Apache.Arrow/Types/UInt64Type.cs        |    29 -
 csharp/src/Apache.Arrow/Types/UInt8Type.cs         |    29 -
 csharp/src/Apache.Arrow/Types/UnionType.cs         |    46 -
 csharp/src/Apache.Arrow/Utility.cs                 |    87 -
 .../Apache.Arrow.Benchmarks.csproj                 |    18 -
 .../ArrowReaderBenchmark.cs                        |   160 -
 .../ArrowWriterBenchmark.cs                        |    58 -
 csharp/test/Apache.Arrow.Benchmarks/Program.cs     |    29 -
 .../Apache.Arrow.Flight.TestWeb.csproj             |    15 -
 .../Extensions/AsyncStreamExtensions.cs            |    39 -
 .../Apache.Arrow.Flight.TestWeb/FlightHolder.cs    |    62 -
 .../Apache.Arrow.Flight.TestWeb/FlightStore.cs     |    27 -
 csharp/test/Apache.Arrow.Flight.TestWeb/Program.cs |    52 -
 .../Properties/launchSettings.json                 |    12 -
 .../RecordBatchWithMetadata.cs                     |    31 -
 csharp/test/Apache.Arrow.Flight.TestWeb/Startup.cs |    61 -
 .../TestFlightServer.cs                            |   116 -
 .../appsettings.Development.json                   |    10 -
 .../Apache.Arrow.Flight.TestWeb/appsettings.json   |    15 -
 .../Apache.Arrow.Flight.Tests.csproj               |    21 -
 .../FlightInfoComparer.cs                          |    39 -
 .../test/Apache.Arrow.Flight.Tests/FlightTests.cs  |   316 -
 .../Apache.Arrow.Flight.Tests/TestWebFactory.cs    |    79 -
 .../Apache.Arrow.Tests/Apache.Arrow.Tests.csproj   |    22 -
 .../test/Apache.Arrow.Tests/ArrayBuilderTests.cs   |   198 -
 .../test/Apache.Arrow.Tests/ArrayTypeComparer.cs   |   121 -
 csharp/test/Apache.Arrow.Tests/ArrowArrayTests.cs  |   274 -
 .../ArrowBufferBitmapBuilderTests.cs               |   493 -
 .../Apache.Arrow.Tests/ArrowBufferBuilderTests.cs  |   216 -
 csharp/test/Apache.Arrow.Tests/ArrowBufferTests.cs |   114 -
 .../Apache.Arrow.Tests/ArrowFileReaderTests.cs     |   161 -
 .../Apache.Arrow.Tests/ArrowFileWriterTests.cs     |   118 -
 .../test/Apache.Arrow.Tests/ArrowReaderVerifier.cs |   222 -
 .../Apache.Arrow.Tests/ArrowStreamReaderTests.cs   |   238 -
 .../Apache.Arrow.Tests/ArrowStreamWriterTests.cs   |   498 -
 .../Apache.Arrow.Tests/BinaryArrayBuilderTests.cs  |   489 -
 csharp/test/Apache.Arrow.Tests/BitUtilityTests.cs  |   171 -
 .../test/Apache.Arrow.Tests/BooleanArrayTests.cs   |   222 -
 csharp/test/Apache.Arrow.Tests/ColumnTests.cs      |    58 -
 csharp/test/Apache.Arrow.Tests/Date32ArrayTests.cs |   125 -
 csharp/test/Apache.Arrow.Tests/Date64ArrayTests.cs |   133 -
 .../Apache.Arrow.Tests/Decimal128ArrayTests.cs     |   241 -
 .../Apache.Arrow.Tests/Decimal256ArrayTests.cs     |   241 -
 .../test/Apache.Arrow.Tests/DecimalUtilityTests.cs |    51 -
 .../Extensions/DateTimeOffsetExtensions.cs         |    40 -
 csharp/test/Apache.Arrow.Tests/FieldComparer.cs    |    44 -
 .../Fixtures/DefaultMemoryAllocatorFixture.cs      |    31 -
 .../test/Apache.Arrow.Tests/SchemaBuilderTests.cs  |   156 -
 csharp/test/Apache.Arrow.Tests/SchemaComparer.cs   |    46 -
 csharp/test/Apache.Arrow.Tests/StructArrayTests.cs |   144 -
 csharp/test/Apache.Arrow.Tests/TableTests.cs       |    83 -
 csharp/test/Apache.Arrow.Tests/TestData.cs         |   280 -
 .../test/Apache.Arrow.Tests/TestDateAndTimeData.cs |    83 -
 .../test/Apache.Arrow.Tests/TestMemoryAllocator.cs |    29 -
 csharp/test/Apache.Arrow.Tests/TypeTests.cs        |   131 -
 csharp/test/Directory.Build.props                  |    26 -
 docs/.gitignore                                    |    19 -
 docs/Makefile                                      |   247 -
 docs/README.md                                     |    30 -
 docs/environment.yml                               |    25 -
 docs/make.bat                                      |    52 -
 docs/requirements.txt                              |     5 -
 docs/source/_static/arrow.png                      |   Bin 21636 -> 0 bytes
 docs/source/_static/favicon.ico                    |   Bin 15086 -> 0 bytes
 docs/source/_static/theme_overrides.css            |    83 -
 docs/source/_templates/docs-sidebar.html           |    19 -
 docs/source/_templates/layout.html                 |     5 -
 docs/source/conf.py                                |   444 -
 docs/source/cpp/api.rst                            |    42 -
 docs/source/cpp/api/array.rst                      |    92 -
 docs/source/cpp/api/builder.rst                    |    56 -
 docs/source/cpp/api/c_abi.rst                      |    48 -
 docs/source/cpp/api/compute.rst                    |    56 -
 docs/source/cpp/api/cuda.rst                       |    74 -
 docs/source/cpp/api/dataset.rst                    |    71 -
 docs/source/cpp/api/datatype.rst                   |   151 -
 docs/source/cpp/api/filesystem.rst                 |    64 -
 docs/source/cpp/api/flight.rst                     |   202 -
 docs/source/cpp/api/formats.rst                    |    98 -
 docs/source/cpp/api/io.rst                         |    95 -
 docs/source/cpp/api/ipc.rst                        |    90 -
 docs/source/cpp/api/memory.rst                     |   124 -
 docs/source/cpp/api/scalar.rst                     |    38 -
 docs/source/cpp/api/support.rst                    |    57 -
 docs/source/cpp/api/table.rst                      |    45 -
 docs/source/cpp/api/tensor.rst                     |    57 -
 docs/source/cpp/api/utilities.rst                  |    52 -
 docs/source/cpp/arrays.rst                         |   214 -
 docs/source/cpp/cmake.rst                          |    72 -
 docs/source/cpp/compute.rst                        |   833 -
 docs/source/cpp/conventions.rst                    |   107 -
 docs/source/cpp/csv.rst                            |   172 -
 docs/source/cpp/dataset.rst                        |   403 -
 docs/source/cpp/datatypes.rst                      |    68 -
 docs/source/cpp/examples/cmake_minimal_build.rst   |    28 -
 .../cpp/examples/dataset_documentation_example.rst |    27 -
 docs/source/cpp/examples/index.rst                 |    27 -
 .../cpp/examples/row_columnar_conversion.rst       |    27 -
 .../source/cpp/examples/tuple_range_conversion.rst |   106 -
 docs/source/cpp/flight.rst                         |   119 -
 docs/source/cpp/getting_started.rst                |    40 -
 docs/source/cpp/index.rst                          |    32 -
 docs/source/cpp/io.rst                             |    87 -
 docs/source/cpp/ipc.rst                            |    75 -
 docs/source/cpp/json.rst                           |   128 -
 docs/source/cpp/memory.rst                         |   185 -
 docs/source/cpp/overview.rst                       |    97 -
 docs/source/cpp/parquet.rst                        |   432 -
 docs/source/cpp/tables.rst                         |    83 -
 docs/source/developers/archery.rst                 |    84 -
 docs/source/developers/benchmarks.rst              |   179 -
 docs/source/developers/contributing.rst            |   360 -
 docs/source/developers/cpp/building.rst            |   481 -
 docs/source/developers/cpp/conventions.rst         |    90 -
 docs/source/developers/cpp/development.rst         |   293 -
 docs/source/developers/cpp/fuzzing.rst             |    99 -
 docs/source/developers/cpp/index.rst               |    31 -
 docs/source/developers/cpp/windows.rst             |   416 -
 docs/source/developers/crossbow.rst                |   257 -
 docs/source/developers/docker.rst                  |   225 -
 docs/source/developers/documentation.rst           |   103 -
 docs/source/developers/python.rst                  |   575 -
 docs/source/example.gz                             |   Bin 41 -> 0 bytes
 docs/source/format/Arrow.graffle                   |   Bin 4142 -> 0 bytes
 docs/source/format/Arrow.png                       |   Bin 112671 -> 0 bytes
 docs/source/format/CDataInterface.rst              |   945 -
 docs/source/format/CStreamInterface.rst            |   218 -
 docs/source/format/Columnar.rst                    |  1215 -
 docs/source/format/Flight.rst                      |   152 -
 docs/source/format/Guidelines.rst                  |    24 -
 docs/source/format/IPC.rst                         |    24 -
 docs/source/format/Integration.rst                 |   398 -
 docs/source/format/Layout.rst                      |    24 -
 docs/source/format/Metadata.rst                    |    24 -
 docs/source/format/Other.rst                       |    63 -
 docs/source/format/README.md                       |    24 -
 docs/source/format/Versioning.rst                  |    70 -
 .../format/integration_json_examples/simple.json   |    98 -
 .../format/integration_json_examples/struct.json   |   201 -
 docs/source/index.rst                              |    82 -
 docs/source/java/index.rst                         |    30 -
 docs/source/java/ipc.rst                           |   187 -
 docs/source/java/vector.rst                        |   288 -
 docs/source/java/vector_schema_root.rst            |    74 -
 docs/source/python/api.rst                         |    40 -
 docs/source/python/api/arrays.rst                  |   122 -
 docs/source/python/api/compute.rst                 |   211 -
 docs/source/python/api/cuda.rst                    |    62 -
 docs/source/python/api/dataset.rst                 |    60 -
 docs/source/python/api/datatypes.rst               |   155 -
 docs/source/python/api/files.rst                   |    65 -
 docs/source/python/api/filesystems.rst             |    53 -
 docs/source/python/api/flight.rst                  |    91 -
 docs/source/python/api/formats.rst                 |    97 -
 docs/source/python/api/ipc.rst                     |    68 -
 docs/source/python/api/memory.rst                  |    72 -
 docs/source/python/api/misc.rst                    |    40 -
 docs/source/python/api/plasma.rst                  |    33 -
 docs/source/python/api/tables.rst                  |    55 -
 docs/source/python/benchmarks.rst                  |    56 -
 docs/source/python/compute.rst                     |    55 -
 docs/source/python/csv.rst                         |   122 -
 docs/source/python/cuda.rst                        |   159 -
 docs/source/python/data.rst                        |   433 -
 docs/source/python/dataset.rst                     |   474 -
 docs/source/python/extending.rst                   |   468 -
 docs/source/python/extending_types.rst             |   325 -
 docs/source/python/feather.rst                     |   109 -
 docs/source/python/filesystems.rst                 |   208 -
 docs/source/python/filesystems_deprecated.rst      |    95 -
 docs/source/python/getting_involved.rst            |    35 -
 docs/source/python/index.rst                       |    57 -
 docs/source/python/install.rst                     |    90 -
 docs/source/python/ipc.rst                         |   316 -
 docs/source/python/json.rst                        |   117 -
 docs/source/python/memory.rst                      |   298 -
 docs/source/python/numpy.rst                       |    75 -
 docs/source/python/pandas.rst                      |   309 -
 docs/source/python/parquet.rst                     |   590 -
 docs/source/python/plasma.rst                      |   464 -
 docs/source/python/timestamps.rst                  |   198 -
 docs/source/status.rst                             |   237 -
 go/README.md                                       |   124 -
 go/arrow/.editorconfig                             |    21 -
 go/arrow/.gitignore                                |    35 -
 go/arrow/Gopkg.lock                                |    44 -
 go/arrow/Gopkg.toml                                |    23 -
 go/arrow/LICENSE.txt                               |  1987 --
 go/arrow/Makefile                                  |    54 -
 go/arrow/_examples/helloworld/main.go              |    32 -
 go/arrow/_tools/tmpl/main.go                       |   267 -
 go/arrow/_tools/tmpl/main_test.go                  |    73 -
 go/arrow/array/array.go                            |   208 -
 go/arrow/array/array_test.go                       |   301 -
 go/arrow/array/binary.go                           |   134 -
 go/arrow/array/binary_test.go                      |   430 -
 go/arrow/array/binarybuilder.go                    |   217 -
 go/arrow/array/binarybuilder_test.go               |    87 -
 go/arrow/array/boolean.go                          |    95 -
 go/arrow/array/boolean_test.go                     |   288 -
 go/arrow/array/booleanbuilder.go                   |   165 -
 go/arrow/array/booleanbuilder_test.go              |    90 -
 go/arrow/array/bufferbuilder.go                    |   127 -
 go/arrow/array/bufferbuilder_byte.go               |    30 -
 go/arrow/array/bufferbuilder_numeric.gen.go        |    58 -
 go/arrow/array/bufferbuilder_numeric.gen.go.tmpl   |    61 -
 go/arrow/array/bufferbuilder_numeric_test.go       |   106 -
 go/arrow/array/builder.go                          |   289 -
 go/arrow/array/builder_test.go                     |    83 -
 go/arrow/array/compare.go                          |   474 -
 go/arrow/array/compare_test.go                     |   531 -
 go/arrow/array/data.go                             |   179 -
 go/arrow/array/data_test.go                        |    51 -
 go/arrow/array/decimal128.go                       |   235 -
 go/arrow/array/decimal128_test.go                  |   179 -
 go/arrow/array/doc.go                              |    20 -
 go/arrow/array/fixed_size_list.go                  |   240 -
 go/arrow/array/fixed_size_list_test.go             |   215 -
 go/arrow/array/fixedsize_binary.go                 |    95 -
 go/arrow/array/fixedsize_binary_test.go            |   111 -
 go/arrow/array/fixedsize_binarybuilder.go          |   154 -
 go/arrow/array/fixedsize_binarybuilder_test.go     |   107 -
 go/arrow/array/float16.go                          |    87 -
 go/arrow/array/float16_builder.go                  |   165 -
 go/arrow/array/float16_builder_test.go             |   119 -
 go/arrow/array/interval.go                         |   434 -
 go/arrow/array/interval_test.go                    |   276 -
 go/arrow/array/list.go                             |   269 -
 go/arrow/array/list_test.go                        |   213 -
 go/arrow/array/null.go                             |   140 -
 go/arrow/array/null_test.go                        |    77 -
 go/arrow/array/numeric.gen.go                      |  1098 -
 go/arrow/array/numeric.gen.go.tmpl                 |    95 -
 go/arrow/array/numeric_test.go                     |   616 -
 go/arrow/array/numericbuilder.gen.go               |  2227 --
 go/arrow/array/numericbuilder.gen.go.tmpl          |   182 -
 go/arrow/array/numericbuilder.gen_test.go          |  2700 --
 go/arrow/array/numericbuilder.gen_test.go.tmpl     |   216 -
 go/arrow/array/record.go                           |   345 -
 go/arrow/array/record_test.go                      |   709 -
 go/arrow/array/string.go                           |   205 -
 go/arrow/array/string_test.go                      |   183 -
 go/arrow/array/struct.go                           |   278 -
 go/arrow/array/struct_test.go                      |   409 -
 go/arrow/array/table.go                            |   455 -
 go/arrow/array/table_test.go                       |   747 -
 go/arrow/array/util.go                             |    24 -
 go/arrow/arrio/arrio.go                            |    91 -
 go/arrow/arrio/arrio_test.go                       |   205 -
 go/arrow/bitutil/bitutil.go                        |   159 -
 go/arrow/bitutil/bitutil_test.go                   |   287 -
 go/arrow/compare.go                                |    79 -
 go/arrow/compare_test.go                           |   286 -
 go/arrow/csv/common.go                             |   174 -
 go/arrow/csv/reader.go                             |   531 -
 go/arrow/csv/reader_test.go                        |   604 -
 go/arrow/csv/testdata/header.csv                   |    21 -
 go/arrow/csv/testdata/simple.csv                   |    28 -
 go/arrow/csv/testdata/types.csv                    |    21 -
 go/arrow/csv/writer.go                             |   218 -
 go/arrow/csv/writer_test.go                        |   274 -
 go/arrow/datatype.go                               |   143 -
 go/arrow/datatype_binary.go                        |    41 -
 go/arrow/datatype_binary_test.go                   |    53 -
 go/arrow/datatype_fixedwidth.go                    |   213 -
 go/arrow/datatype_fixedwidth_test.go               |   297 -
 go/arrow/datatype_nested.go                        |   180 -
 go/arrow/datatype_nested_test.go                   |   356 -
 go/arrow/datatype_null.go                          |    29 -
 go/arrow/datatype_null_test.go                     |    38 -
 go/arrow/datatype_numeric.gen.go                   |   134 -
 go/arrow/datatype_numeric.gen.go.tmpl              |    40 -
 go/arrow/datatype_numeric.gen.go.tmpldata          |    66 -
 go/arrow/decimal128/decimal128.go                  |    73 -
 go/arrow/decimal128/decimal128_test.go             |    94 -
 go/arrow/doc.go                                    |    39 -
 go/arrow/endian/big.go                             |    25 -
 go/arrow/endian/little.go                          |    25 -
 go/arrow/example_test.go                           |   595 -
 go/arrow/flight/Flight.pb.go                       |  1473 -
 go/arrow/flight/Flight_grpc.pb.go                  |   877 -
 go/arrow/flight/basic_auth_flight_test.go          |   205 -
 go/arrow/flight/client.go                          |   129 -
 go/arrow/flight/client_auth.go                     |    91 -
 go/arrow/flight/example_flight_server_test.go      |    86 -
 go/arrow/flight/flight_test.go                     |   313 -
 go/arrow/flight/gen.go                             |    19 -
 go/arrow/flight/record_batch_reader.go             |    87 -
 go/arrow/flight/record_batch_writer.go             |    72 -
 go/arrow/flight/server.go                          |   118 -
 go/arrow/flight/server_auth.go                     |   229 -
 go/arrow/float16/float16.go                        |    70 -
 go/arrow/float16/float16_test.go                   |    45 -
 go/arrow/gen-flatbuffers.go                        |   122 -
 go/arrow/go.mod                                    |    38 -
 go/arrow/go.sum                                    |   110 -
 go/arrow/internal/arrdata/arrdata.go               |  1189 -
 go/arrow/internal/arrdata/ioutil.go                |   274 -
 go/arrow/internal/arrjson/arrjson.go               |  1501 -
 go/arrow/internal/arrjson/arrjson_test.go          |  3104 --
 go/arrow/internal/arrjson/option.go                |    57 -
 go/arrow/internal/arrjson/reader.go                |   100 -
 go/arrow/internal/arrjson/writer.go                |   116 -
 go/arrow/internal/cpu/README.md                    |    42 -
 go/arrow/internal/cpu/cpu.go                       |    77 -
 go/arrow/internal/cpu/cpu_s390x.go                 |     7 -
 go/arrow/internal/cpu/cpu_test.go                  |    51 -
 go/arrow/internal/cpu/cpu_x86.go                   |   107 -
 go/arrow/internal/cpu/cpu_x86.s                    |    32 -
 go/arrow/internal/debug/assert_off.go              |    24 -
 go/arrow/internal/debug/assert_on.go               |    28 -
 go/arrow/internal/debug/doc.go                     |    32 -
 go/arrow/internal/debug/log_off.go                 |    21 -
 go/arrow/internal/debug/log_on.go                  |    32 -
 go/arrow/internal/debug/util.go                    |    37 -
 go/arrow/internal/flatbuf/Binary.go                |    51 -
 go/arrow/internal/flatbuf/Block.go                 |    74 -
 go/arrow/internal/flatbuf/BodyCompression.go       |    87 -
 go/arrow/internal/flatbuf/BodyCompressionMethod.go |    52 -
 go/arrow/internal/flatbuf/Bool.go                  |    50 -
 go/arrow/internal/flatbuf/Buffer.go                |    73 -
 go/arrow/internal/flatbuf/CompressionType.go       |    45 -
 go/arrow/internal/flatbuf/Date.go                  |    71 -
 go/arrow/internal/flatbuf/DateUnit.go              |    45 -
 go/arrow/internal/flatbuf/Decimal.go               |   107 -
 go/arrow/internal/flatbuf/DictionaryBatch.go       |   108 -
 go/arrow/internal/flatbuf/DictionaryEncoding.go    |   135 -
 go/arrow/internal/flatbuf/DictionaryKind.go        |    47 -
 go/arrow/internal/flatbuf/Duration.go              |    65 -
 go/arrow/internal/flatbuf/Endianness.go            |    47 -
 go/arrow/internal/flatbuf/Feature.go               |    71 -
 go/arrow/internal/flatbuf/Field.go                 |   188 -
 go/arrow/internal/flatbuf/FieldNode.go             |    76 -
 go/arrow/internal/flatbuf/FixedSizeBinary.go       |    67 -
 go/arrow/internal/flatbuf/FixedSizeList.go         |    67 -
 go/arrow/internal/flatbuf/FloatingPoint.go         |    65 -
 go/arrow/internal/flatbuf/Footer.go                |   162 -
 go/arrow/internal/flatbuf/Int.go                   |    80 -
 go/arrow/internal/flatbuf/Interval.go              |    65 -
 go/arrow/internal/flatbuf/IntervalUnit.go          |    45 -
 go/arrow/internal/flatbuf/KeyValue.go              |    75 -
 go/arrow/internal/flatbuf/LargeBinary.go           |    52 -
 go/arrow/internal/flatbuf/LargeList.go             |    52 -
 go/arrow/internal/flatbuf/LargeUtf8.go             |    52 -
 go/arrow/internal/flatbuf/List.go                  |    50 -
 go/arrow/internal/flatbuf/Map.go                   |    92 -
 go/arrow/internal/flatbuf/Message.go               |   133 -
 go/arrow/internal/flatbuf/MessageHeader.go         |    65 -
 go/arrow/internal/flatbuf/MetadataVersion.go       |    65 -
 go/arrow/internal/flatbuf/Null.go                  |    51 -
 go/arrow/internal/flatbuf/Precision.go             |    48 -
 go/arrow/internal/flatbuf/RecordBatch.go           |   154 -
 go/arrow/internal/flatbuf/Schema.go                |   159 -
 .../internal/flatbuf/SparseMatrixCompressedAxis.go |    45 -
 go/arrow/internal/flatbuf/SparseMatrixIndexCSR.go  |   181 -
 go/arrow/internal/flatbuf/SparseMatrixIndexCSX.go  |   200 -
 go/arrow/internal/flatbuf/SparseTensor.go          |   175 -
 go/arrow/internal/flatbuf/SparseTensorIndex.go     |    51 -
 go/arrow/internal/flatbuf/SparseTensorIndexCOO.go  |   179 -
 go/arrow/internal/flatbuf/SparseTensorIndexCSF.go  |   291 -
 go/arrow/internal/flatbuf/Struct_.go               |    53 -
 go/arrow/internal/flatbuf/Tensor.go                |   163 -
 go/arrow/internal/flatbuf/TensorDim.go             |    83 -
 go/arrow/internal/flatbuf/Time.go                  |    83 -
 go/arrow/internal/flatbuf/TimeUnit.go              |    51 -
 go/arrow/internal/flatbuf/Timestamp.go             |   122 -
 go/arrow/internal/flatbuf/Type.go                  |   108 -
 go/arrow/internal/flatbuf/Union.go                 |   101 -
 go/arrow/internal/flatbuf/UnionMode.go             |    45 -
 go/arrow/internal/flatbuf/Utf8.go                  |    51 -
 go/arrow/internal/testing/tools/bits.go            |    40 -
 go/arrow/internal/testing/tools/bits_test.go       |    42 -
 go/arrow/internal/testing/tools/bool.go            |    25 -
 go/arrow/ipc/cmd/arrow-cat/main.go                 |   216 -
 go/arrow/ipc/cmd/arrow-cat/main_test.go            |   582 -
 go/arrow/ipc/cmd/arrow-file-to-stream/main.go      |    83 -
 go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go |    73 -
 .../ipc/cmd/arrow-json-integration-test/main.go    |   226 -
 .../cmd/arrow-json-integration-test/main_test.go   |    94 -
 go/arrow/ipc/cmd/arrow-ls/main.go                  |   201 -
 go/arrow/ipc/cmd/arrow-ls/main_test.go             |   341 -
 go/arrow/ipc/cmd/arrow-stream-to-file/main.go      |    71 -
 go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go |    82 -
 go/arrow/ipc/compression.go                        |   109 -
 go/arrow/ipc/dict.go                               |    85 -
 go/arrow/ipc/dict_test.go                          |   196 -
 go/arrow/ipc/file_reader.go                        |   615 -
 go/arrow/ipc/file_test.go                          |    83 -
 go/arrow/ipc/file_writer.go                        |   376 -
 go/arrow/ipc/ipc.go                                |   144 -
 go/arrow/ipc/message.go                            |   241 -
 go/arrow/ipc/metadata.go                           |  1073 -
 go/arrow/ipc/metadata_test.go                      |   159 -
 go/arrow/ipc/reader.go                             |   209 -
 go/arrow/ipc/stream_test.go                        |   111 -
 go/arrow/ipc/writer.go                             |   565 -
 go/arrow/math/Makefile                             |    90 -
 go/arrow/math/_lib/.gitignore                      |    18 -
 go/arrow/math/_lib/CMakeLists.txt                  |    22 -
 go/arrow/math/_lib/arch.h                          |    27 -
 go/arrow/math/_lib/float64.c                       |    26 -
 go/arrow/math/_lib/float64_avx2.s                  |   176 -
 go/arrow/math/_lib/float64_sse4.s                  |   103 -
 go/arrow/math/_lib/int64.c                         |    27 -
 go/arrow/math/_lib/int64_avx2.s                    |   181 -
 go/arrow/math/_lib/int64_sse4.s                    |   108 -
 go/arrow/math/_lib/uint64.c                        |    27 -
 go/arrow/math/_lib/uint64_avx2.s                   |   181 -
 go/arrow/math/_lib/uint64_sse4.s                   |   108 -
 go/arrow/math/doc.go                               |    30 -
 go/arrow/math/float64.go                           |    47 -
 go/arrow/math/float64.tmpldata                     |     4 -
 go/arrow/math/float64_amd64.go                     |    33 -
 go/arrow/math/float64_avx2_amd64.go                |    41 -
 go/arrow/math/float64_avx2_amd64.s                 |   167 -
 go/arrow/math/float64_noasm.go                     |    25 -
 go/arrow/math/float64_s390x.go                     |    25 -
 go/arrow/math/float64_sse4_amd64.go                |    41 -
 go/arrow/math/float64_sse4_amd64.s                 |    94 -
 go/arrow/math/float64_test.go                      |    86 -
 go/arrow/math/int64.go                             |    47 -
 go/arrow/math/int64.tmpldata                       |     4 -
 go/arrow/math/int64_amd64.go                       |    33 -
 go/arrow/math/int64_avx2_amd64.go                  |    41 -
 go/arrow/math/int64_avx2_amd64.s                   |   173 -
 go/arrow/math/int64_noasm.go                       |    25 -
 go/arrow/math/int64_s390x.go                       |    25 -
 go/arrow/math/int64_sse4_amd64.go                  |    41 -
 go/arrow/math/int64_sse4_amd64.s                   |   100 -
 go/arrow/math/int64_test.go                        |    86 -
 go/arrow/math/math_amd64.go                        |    51 -
 go/arrow/math/math_noasm.go                        |    29 -
 go/arrow/math/math_s390x.go                        |    29 -
 go/arrow/math/type.go.tmpl                         |    48 -
 go/arrow/math/type_amd64.go.tmpl                   |    33 -
 go/arrow/math/type_noasm.go.tmpl                   |    25 -
 go/arrow/math/type_s390x.go.tmpl                   |    25 -
 go/arrow/math/type_simd_amd64.go.tmpl              |    42 -
 go/arrow/math/type_test.go.tmpl                    |    87 -
 go/arrow/math/uint64.go                            |    47 -
 go/arrow/math/uint64.tmpldata                      |     4 -
 go/arrow/math/uint64_amd64.go                      |    33 -
 go/arrow/math/uint64_avx2_amd64.go                 |    41 -
 go/arrow/math/uint64_avx2_amd64.s                  |   173 -
 go/arrow/math/uint64_noasm.go                      |    25 -
 go/arrow/math/uint64_s390x.go                      |    25 -
 go/arrow/math/uint64_sse4_amd64.go                 |    41 -
 go/arrow/math/uint64_sse4_amd64.s                  |   100 -
 go/arrow/math/uint64_test.go                       |    86 -
 go/arrow/memory/Makefile                           |    54 -
 go/arrow/memory/_lib/.gitignore                    |    18 -
 go/arrow/memory/_lib/CMakeLists.txt                |    22 -
 go/arrow/memory/_lib/arch.h                        |    27 -
 go/arrow/memory/_lib/memory.c                      |    27 -
 go/arrow/memory/_lib/memory_avx2.s                 |    97 -
 go/arrow/memory/_lib/memory_sse4.s                 |    96 -
 go/arrow/memory/allocator.go                       |    33 -
 go/arrow/memory/buffer.go                          |   125 -
 go/arrow/memory/buffer_test.go                     |    57 -
 go/arrow/memory/checked_allocator.go               |    74 -
 go/arrow/memory/doc.go                             |    20 -
 go/arrow/memory/go_allocator.go                    |    48 -
 go/arrow/memory/go_allocator_test.go               |    76 -
 go/arrow/memory/memory.go                          |    33 -
 go/arrow/memory/memory_amd64.go                    |    33 -
 go/arrow/memory/memory_avx2_amd64.go               |    41 -
 go/arrow/memory/memory_avx2_amd64.s                |    85 -
 go/arrow/memory/memory_js_wasm.go                  |    23 -
 go/arrow/memory/memory_noasm.go                    |    23 -
 go/arrow/memory/memory_sse4_amd64.go               |    31 -
 go/arrow/memory/memory_sse4_amd64.s                |    84 -
 go/arrow/memory/memory_test.go                     |   125 -
 go/arrow/memory/util.go                            |    37 -
 go/arrow/memory/util_test.go                       |    61 -
 go/arrow/numeric.schema.json                       |    15 -
 go/arrow/numeric.tmpldata                          |   141 -
 go/arrow/schema.go                                 |   193 -
 go/arrow/schema_test.go                            |   363 -
 go/arrow/tensor/numeric.gen.go                     |   327 -
 go/arrow/tensor/numeric.gen.go.tmpl                |    55 -
 go/arrow/tensor/numeric.gen_test.go                |  1170 -
 go/arrow/tensor/numeric.gen_test.go.tmpl           |   126 -
 go/arrow/tensor/tensor.go                          |   247 -
 go/arrow/tensor/tensor_test.go                     |   166 -
 go/arrow/type_string.go                            |    53 -
 go/arrow/type_traits_boolean.go                    |    28 -
 go/arrow/type_traits_decimal128.go                 |    75 -
 go/arrow/type_traits_float16.go                    |    74 -
 go/arrow/type_traits_interval.go                   |   126 -
 go/arrow/type_traits_numeric.gen.go                |   814 -
 go/arrow/type_traits_numeric.gen.go.tmpl           |    95 -
 go/arrow/type_traits_numeric.gen_test.go           |   570 -
 go/arrow/type_traits_numeric.gen_test.go.tmpl      |    61 -
 go/arrow/type_traits_test.go                       |   201 -
 go/parquet/.gitignore                              |    31 -
 go/parquet/LICENSE.txt                             |  1987 --
 go/parquet/compress/brotli.go                      |   115 -
 go/parquet/compress/compress.go                    |   156 -
 go/parquet/compress/compress_test.go               |   138 -
 go/parquet/compress/gzip.go                        |    98 -
 go/parquet/compress/snappy.go                      |    62 -
 go/parquet/compress/zstd.go                        |   112 -
 go/parquet/doc.go                                  |    68 -
 go/parquet/encryption_properties.go                |   711 -
 go/parquet/encryption_properties_test.go           |   217 -
 go/parquet/go.mod                                  |    35 -
 go/parquet/go.sum                                  |   155 -
 go/parquet/internal/bmi/Makefile                   |    47 -
 go/parquet/internal/bmi/_lib/bitmap_bmi2.c         |    30 -
 go/parquet/internal/bmi/_lib/bitmap_bmi2.s         |   140 -
 go/parquet/internal/bmi/bitmap_bmi2.go             |    48 -
 go/parquet/internal/bmi/bitmap_bmi2.s              |   117 -
 go/parquet/internal/bmi/bmi_init.go                |    60 -
 go/parquet/internal/bmi/bmi_noasm.go               |   249 -
 go/parquet/internal/debug/assert_off.go            |    24 -
 go/parquet/internal/debug/assert_on.go             |    28 -
 go/parquet/internal/debug/doc.go                   |    23 -
 go/parquet/internal/encryption/aes.go              |   264 -
 go/parquet/internal/encryption/decryptor.go        |   261 -
 go/parquet/internal/encryption/encryptor.go        |   237 -
 go/parquet/internal/encryption/key_handling.go     |    62 -
 .../gen-go/parquet/GoUnusedProtection__.go         |     6 -
 .../internal/gen-go/parquet/parquet-consts.go      |    23 -
 go/parquet/internal/gen-go/parquet/parquet.go      | 10961 ------
 .../internal/gen-go/parquet/staticcheck.conf       |    17 -
 go/parquet/internal/testutils/random.go            |   452 -
 go/parquet/internal/testutils/random_arrow.go      |   488 -
 go/parquet/internal/thrift/helpers.go              |    87 -
 go/parquet/internal/utils/Makefile                 |    72 -
 go/parquet/internal/utils/_lib/arch.h              |    27 -
 go/parquet/internal/utils/_lib/bit_packing_avx2.c  |  1879 --
 go/parquet/internal/utils/_lib/bit_packing_avx2.s  |  4012 ---
 go/parquet/internal/utils/_lib/min_max.c           |    73 -
 go/parquet/internal/utils/_lib/min_max_avx2.s      |  1366 -
 go/parquet/internal/utils/_lib/min_max_sse4.s      |   613 -
 go/parquet/internal/utils/_lib/unpack_bool.c       |    30 -
 go/parquet/internal/utils/_lib/unpack_bool_avx2.s  |  6293 ----
 go/parquet/internal/utils/_lib/unpack_bool_sse4.s  |   104 -
 go/parquet/internal/utils/bit_benchmark_test.go    |   220 -
 go/parquet/internal/utils/bit_block_counter.go     |   263 -
 .../internal/utils/bit_block_counter_test.go       |   201 -
 go/parquet/internal/utils/bit_packing.go           |    35 -
 go/parquet/internal/utils/bit_packing_avx2.go      |    53 -
 go/parquet/internal/utils/bit_packing_avx2.s       |  3439 --
 go/parquet/internal/utils/bit_packing_default.go   |  1941 --
 go/parquet/internal/utils/bit_packing_noasm.go     |    23 -
 go/parquet/internal/utils/bit_reader.go            |   348 -
 go/parquet/internal/utils/bit_reader_test.go       |   619 -
 go/parquet/internal/utils/bit_run_reader.go        |   148 -
 go/parquet/internal/utils/bit_run_reader_test.go   |   158 -
 go/parquet/internal/utils/bit_set_run_reader.go    |   345 -
 .../internal/utils/bit_set_run_reader_test.go      |   276 -
 go/parquet/internal/utils/bit_writer.go            |   182 -
 go/parquet/internal/utils/bitmap_reader.go         |    72 -
 go/parquet/internal/utils/bitmap_reader_test.go    |    75 -
 go/parquet/internal/utils/bitmap_writer.go         |   277 -
 go/parquet/internal/utils/bitmap_writer_test.go    |   304 -
 go/parquet/internal/utils/clib_amd64.s             |    87 -
 go/parquet/internal/utils/dictionary.go            |    87 -
 go/parquet/internal/utils/math.go                  |    49 -
 go/parquet/internal/utils/min_max.go               |   120 -
 go/parquet/internal/utils/min_max_amd64.go         |    43 -
 go/parquet/internal/utils/min_max_avx2.go          |    58 -
 go/parquet/internal/utils/min_max_avx2.s           |  1352 -
 go/parquet/internal/utils/min_max_noasm.go         |    27 -
 go/parquet/internal/utils/min_max_sse4.go          |    56 -
 go/parquet/internal/utils/min_max_sse4.s           |   592 -
 go/parquet/internal/utils/physical_types.tmpldata  |    52 -
 go/parquet/internal/utils/rle.go                   |   583 -
 go/parquet/internal/utils/typed_rle_dict.gen.go    |  1375 -
 .../internal/utils/typed_rle_dict.gen.go.tmpl      |   218 -
 go/parquet/internal/utils/unpack_bool.go           |    26 -
 go/parquet/internal/utils/unpack_bool_amd64.go     |    41 -
 go/parquet/internal/utils/unpack_bool_avx2.go      |    29 -
 go/parquet/internal/utils/unpack_bool_avx2.s       |  6961 ----
 go/parquet/internal/utils/unpack_bool_noasm.go     |    25 -
 go/parquet/internal/utils/unpack_bool_sse4.go      |    29 -
 go/parquet/internal/utils/unpack_bool_sse4.s       |    88 -
 go/parquet/internal/utils/write_utils.go           |    57 -
 go/parquet/reader_properties.go                    |    79 -
 go/parquet/reader_writer_properties_test.go        |    69 -
 go/parquet/tools.go                                |    25 -
 go/parquet/types.go                                |   354 -
 go/parquet/writer_properties.go                    |   510 -
 java/.gitattributes                                |     2 -
 java/.gitignore                                    |    23 -
 java/README.md                                     |   164 -
 java/adapter/avro/pom.xml                          |    59 -
 .../main/java/org/apache/arrow/AvroToArrow.java    |    67 -
 .../java/org/apache/arrow/AvroToArrowConfig.java   |    86 -
 .../org/apache/arrow/AvroToArrowConfigBuilder.java |    74 -
 .../java/org/apache/arrow/AvroToArrowUtils.java    |   805 -
 .../apache/arrow/AvroToArrowVectorIterator.java    |   186 -
 .../apache/arrow/consumers/AvroArraysConsumer.java |    74 -
 .../arrow/consumers/AvroBooleanConsumer.java       |    43 -
 .../apache/arrow/consumers/AvroBytesConsumer.java  |    49 -
 .../apache/arrow/consumers/AvroDoubleConsumer.java |    42 -
 .../apache/arrow/consumers/AvroEnumConsumer.java   |    43 -
 .../apache/arrow/consumers/AvroFixedConsumer.java  |    46 -
 .../apache/arrow/consumers/AvroFloatConsumer.java  |    42 -
 .../apache/arrow/consumers/AvroIntConsumer.java    |    42 -
 .../apache/arrow/consumers/AvroLongConsumer.java   |    42 -
 .../apache/arrow/consumers/AvroMapConsumer.java    |    79 -
 .../apache/arrow/consumers/AvroNullConsumer.java   |    39 -
 .../apache/arrow/consumers/AvroStringConsumer.java |    48 -
 .../apache/arrow/consumers/AvroStructConsumer.java |    76 -
 .../apache/arrow/consumers/AvroUnionsConsumer.java |    86 -
 .../apache/arrow/consumers/BaseAvroConsumer.java   |    65 -
 .../arrow/consumers/CompositeAvroConsumer.java     |    73 -
 .../java/org/apache/arrow/consumers/Consumer.java  |    71 -
 .../org/apache/arrow/consumers/SkipConsumer.java   |    67 -
 .../org/apache/arrow/consumers/SkipFunction.java   |    30 -
 .../arrow/consumers/logical/AvroDateConsumer.java  |    43 -
 .../consumers/logical/AvroDecimalConsumer.java     |    88 -
 .../consumers/logical/AvroTimeMicroConsumer.java   |    43 -
 .../consumers/logical/AvroTimeMillisConsumer.java  |    43 -
 .../logical/AvroTimestampMicrosConsumer.java       |    43 -
 .../logical/AvroTimestampMillisConsumer.java       |    43 -
 .../org/apache/arrow/AvroLogicalTypesTest.java     |   201 -
 .../java/org/apache/arrow/AvroSkipFieldTest.java   |   626 -
 .../test/java/org/apache/arrow/AvroTestBase.java   |   229 -
 .../org/apache/arrow/AvroToArrowIteratorTest.java  |   313 -
 .../java/org/apache/arrow/AvroToArrowTest.java     |   477 -
 .../org/apache/arrow/TestWriteReadAvroRecord.java  |    93 -
 .../resources/schema/attrs/test_enum_attrs.avsc    |    24 -
 .../resources/schema/attrs/test_fixed_attr.avsc    |    24 -
 .../resources/schema/attrs/test_record_attrs.avsc  |    37 -
 .../test/resources/schema/logical/test_date.avsc   |    23 -
 .../schema/logical/test_decimal_invalid1.avsc      |    25 -
 .../schema/logical/test_decimal_invalid2.avsc      |    25 -
 .../schema/logical/test_decimal_invalid3.avsc      |    25 -
 .../schema/logical/test_decimal_invalid4.avsc      |    26 -
 .../logical/test_decimal_with_original_bytes.avsc  |    25 -
 .../logical/test_decimal_with_original_fixed.avsc  |    26 -
 .../resources/schema/logical/test_time_micros.avsc |    23 -
 .../resources/schema/logical/test_time_millis.avsc |    23 -
 .../schema/logical/test_timestamp_micros.avsc      |    23 -
 .../schema/logical/test_timestamp_millis.avsc      |    23 -
 .../schema/skip/test_skip_array_before.avsc        |    27 -
 .../schema/skip/test_skip_array_expected.avsc      |    26 -
 .../resources/schema/skip/test_skip_base1.avsc     |    28 -
 .../resources/schema/skip/test_skip_base2.avsc     |    29 -
 .../schema/skip/test_skip_boolean_expected.avsc    |    28 -
 .../schema/skip/test_skip_bytes_expected.avsc      |    27 -
 .../schema/skip/test_skip_double_expected.avsc     |    28 -
 .../schema/skip/test_skip_enum_expected.avsc       |    27 -
 .../schema/skip/test_skip_fixed_expected.avsc      |    27 -
 .../schema/skip/test_skip_float_expected.avsc      |    28 -
 .../schema/skip/test_skip_int_expected.avsc        |    28 -
 .../schema/skip/test_skip_long_expected.avsc       |    28 -
 .../schema/skip/test_skip_map_before.avsc          |    27 -
 .../schema/skip/test_skip_map_expected.avsc        |    26 -
 .../skip/test_skip_multi_fields_expected.avsc      |    25 -
 .../schema/skip/test_skip_record_before.avsc       |    38 -
 .../schema/skip/test_skip_record_expected.avsc     |    25 -
 .../skip/test_skip_second_level_expected.avsc      |    34 -
 .../skip/test_skip_single_field_expected.avsc      |    26 -
 .../schema/skip/test_skip_string_expected.avsc     |    27 -
 .../skip/test_skip_third_level_expected.avsc       |    45 -
 .../schema/skip/test_skip_union_before.avsc        |    28 -
 .../test_skip_union_multi_fields_expected.avsc     |    27 -
 .../test_skip_union_nullable_field_expected.avsc   |    27 -
 .../skip/test_skip_union_one_field_expected.avsc   |    27 -
 .../avro/src/test/resources/schema/test.avsc       |    27 -
 .../avro/src/test/resources/schema/test_array.avsc |    23 -
 .../avro/src/test/resources/schema/test_fixed.avsc |    23 -
 .../src/test/resources/schema/test_large_data.avsc |    75 -
 .../avro/src/test/resources/schema/test_map.avsc   |    23 -
 .../test/resources/schema/test_nested_record.avsc  |    35 -
 .../resources/schema/test_nullable_boolean.avsc    |    25 -
 .../test/resources/schema/test_nullable_bytes.avsc |    25 -
 .../resources/schema/test_nullable_double.avsc     |    25 -
 .../test/resources/schema/test_nullable_float.avsc |    25 -
 .../test/resources/schema/test_nullable_int.avsc   |    25 -
 .../test/resources/schema/test_nullable_long.avsc  |    25 -
 .../resources/schema/test_nullable_string.avsc     |    25 -
 .../test/resources/schema/test_nullable_union.avsc |    25 -
 .../resources/schema/test_primitive_boolean.avsc   |    22 -
 .../resources/schema/test_primitive_bytes.avsc     |    22 -
 .../resources/schema/test_primitive_double.avsc    |    22 -
 .../test/resources/schema/test_primitive_enum.avsc |    23 -
 .../resources/schema/test_primitive_float.avsc     |    22 -
 .../test/resources/schema/test_primitive_int.avsc  |    22 -
 .../test/resources/schema/test_primitive_long.avsc |    22 -
 .../resources/schema/test_primitive_string.avsc    |    22 -
 .../src/test/resources/schema/test_record.avsc     |    27 -
 .../avro/src/test/resources/schema/test_union.avsc |    25 -
 java/adapter/jdbc/pom.xml                          |   109 -
 .../arrow/adapter/jdbc/ArrowVectorIterator.java    |   186 -
 .../org/apache/arrow/adapter/jdbc/Constants.java   |    31 -
 .../apache/arrow/adapter/jdbc/JdbcFieldInfo.java   |   114 -
 .../org/apache/arrow/adapter/jdbc/JdbcToArrow.java |   273 -
 .../arrow/adapter/jdbc/JdbcToArrowConfig.java      |   280 -
 .../adapter/jdbc/JdbcToArrowConfigBuilder.java     |   192 -
 .../arrow/adapter/jdbc/JdbcToArrowUtils.java       |   336 -
 .../arrow/adapter/jdbc/consumer/ArrayConsumer.java |   134 -
 .../arrow/adapter/jdbc/consumer/BaseConsumer.java  |    54 -
 .../adapter/jdbc/consumer/BigIntConsumer.java      |    87 -
 .../adapter/jdbc/consumer/BinaryConsumer.java      |   135 -
 .../arrow/adapter/jdbc/consumer/BitConsumer.java   |    87 -
 .../arrow/adapter/jdbc/consumer/BlobConsumer.java  |    72 -
 .../arrow/adapter/jdbc/consumer/ClobConsumer.java  |   161 -
 .../jdbc/consumer/CompositeJdbcConsumer.java       |    76 -
 .../arrow/adapter/jdbc/consumer/DateConsumer.java  |   116 -
 .../adapter/jdbc/consumer/DecimalConsumer.java     |    88 -
 .../adapter/jdbc/consumer/DoubleConsumer.java      |    87 -
 .../arrow/adapter/jdbc/consumer/FloatConsumer.java |    87 -
 .../arrow/adapter/jdbc/consumer/IntConsumer.java   |    87 -
 .../arrow/adapter/jdbc/consumer/JdbcConsumer.java  |    46 -
 .../arrow/adapter/jdbc/consumer/NullConsumer.java  |    38 -
 .../adapter/jdbc/consumer/SmallIntConsumer.java    |    87 -
 .../arrow/adapter/jdbc/consumer/TimeConsumer.java  |   112 -
 .../adapter/jdbc/consumer/TimestampConsumer.java   |    89 -
 .../adapter/jdbc/consumer/TimestampTZConsumer.java |    97 -
 .../adapter/jdbc/consumer/TinyIntConsumer.java     |    87 -
 .../adapter/jdbc/consumer/VarCharConsumer.java     |    86 -
 .../adapter/jdbc/AbstractJdbcToArrowTest.java      |   143 -
 .../arrow/adapter/jdbc/JdbcFieldInfoTest.java      |    45 -
 .../arrow/adapter/jdbc/JdbcToArrowConfigTest.java  |   159 -
 .../arrow/adapter/jdbc/JdbcToArrowTestHelper.java  |   381 -
 .../java/org/apache/arrow/adapter/jdbc/Table.java  |   234 -
 .../jdbc/consumer/AbstractConsumerTest.java        |    39 -
 .../adapter/jdbc/consumer/BinaryConsumerTest.java  |   117 -
 .../adapter/jdbc/h2/JdbcToArrowArrayTest.java      |   373 -
 .../adapter/jdbc/h2/JdbcToArrowCharSetTest.java    |   156 -
 .../adapter/jdbc/h2/JdbcToArrowDataTypesTest.java  |   249 -
 .../arrow/adapter/jdbc/h2/JdbcToArrowNullTest.java |   278 -
 .../arrow/adapter/jdbc/h2/JdbcToArrowTest.java     |  1339 -
 .../adapter/jdbc/h2/JdbcToArrowTimeZoneTest.java   |   169 -
 .../jdbc/h2/JdbcToArrowVectorIteratorTest.java     |   427 -
 .../test/resources/h2/test1_all_datatypes_h2.yml   |   121 -
 .../resources/h2/test1_all_datatypes_null_h2.yml   |    51 -
 .../test1_all_datatypes_selected_null_rows_h2.yml  |    83 -
 .../jdbc/src/test/resources/h2/test1_bigint_h2.yml |    46 -
 .../jdbc/src/test/resources/h2/test1_binary_h2.yml |    46 -
 .../jdbc/src/test/resources/h2/test1_bit_h2.yml    |    46 -
 .../jdbc/src/test/resources/h2/test1_blob_h2.yml   |    46 -
 .../jdbc/src/test/resources/h2/test1_bool_h2.yml   |    46 -
 .../jdbc/src/test/resources/h2/test1_char_h2.yml   |    46 -
 .../src/test/resources/h2/test1_charset_ch_h2.yml  |    43 -
 .../src/test/resources/h2/test1_charset_h2.yml     |    53 -
 .../src/test/resources/h2/test1_charset_jp_h2.yml  |    43 -
 .../src/test/resources/h2/test1_charset_kr_h2.yml  |    43 -
 .../jdbc/src/test/resources/h2/test1_clob_h2.yml   |    46 -
 .../jdbc/src/test/resources/h2/test1_date_h2.yml   |    48 -
 .../src/test/resources/h2/test1_decimal_h2.yml     |    46 -
 .../jdbc/src/test/resources/h2/test1_double_h2.yml |    46 -
 .../src/test/resources/h2/test1_est_date_h2.yml    |    48 -
 .../src/test/resources/h2/test1_est_time_h2.yml    |    48 -
 .../test/resources/h2/test1_est_timestamp_h2.yml   |    49 -
 .../src/test/resources/h2/test1_gmt_date_h2.yml    |    48 -
 .../src/test/resources/h2/test1_gmt_time_h2.yml    |    48 -
 .../test/resources/h2/test1_gmt_timestamp_h2.yml   |    48 -
 .../jdbc/src/test/resources/h2/test1_int_h2.yml    |    46 -
 .../jdbc/src/test/resources/h2/test1_null_h2.yml   |    36 -
 .../src/test/resources/h2/test1_pst_date_h2.yml    |    48 -
 .../src/test/resources/h2/test1_pst_time_h2.yml    |    48 -
 .../test/resources/h2/test1_pst_timestamp_h2.yml   |    48 -
 .../jdbc/src/test/resources/h2/test1_real_h2.yml   |    46 -
 .../h2/test1_selected_datatypes_null_h2.yml        |    46 -
 .../src/test/resources/h2/test1_smallint_h2.yml    |    46 -
 .../jdbc/src/test/resources/h2/test1_time_h2.yml   |    46 -
 .../src/test/resources/h2/test1_timestamp_h2.yml   |    46 -
 .../src/test/resources/h2/test1_tinyint_h2.yml     |    46 -
 .../src/test/resources/h2/test1_varchar_h2.yml     |    46 -
 java/adapter/jdbc/src/test/resources/logback.xml   |    28 -
 java/adapter/orc/CMakeLists.txt                    |    43 -
 java/adapter/orc/pom.xml                           |   124 -
 .../org/apache/arrow/adapter/orc/OrcFieldNode.java |    45 -
 .../org/apache/arrow/adapter/orc/OrcJniUtils.java  |    62 -
 .../arrow/adapter/orc/OrcMemoryJniWrapper.java     |    77 -
 .../org/apache/arrow/adapter/orc/OrcReader.java    |    90 -
 .../arrow/adapter/orc/OrcReaderJniWrapper.java     |    79 -
 .../apache/arrow/adapter/orc/OrcRecordBatch.java   |    47 -
 .../arrow/adapter/orc/OrcReferenceManager.java     |   121 -
 .../apache/arrow/adapter/orc/OrcStripeReader.java  |   109 -
 .../adapter/orc/OrcStripeReaderJniWrapper.java     |    45 -
 .../apache/arrow/adapter/orc/OrcReaderTest.java    |   104 -
 java/algorithm/pom.xml                             |    55 -
 .../algorithm/deduplicate/DeduplicationUtils.java  |    96 -
 .../deduplicate/VectorRunDeduplicator.java         |   108 -
 .../algorithm/dictionary/DictionaryBuilder.java    |    72 -
 .../algorithm/dictionary/DictionaryEncoder.java    |    39 -
 .../HashTableBasedDictionaryBuilder.java           |   153 -
 .../dictionary/HashTableDictionaryEncoder.java     |   146 -
 .../dictionary/LinearDictionaryEncoder.java        |   112 -
 .../dictionary/SearchDictionaryEncoder.java        |   100 -
 .../SearchTreeBasedDictionaryBuilder.java          |   146 -
 .../arrow/algorithm/misc/PartialSumUtils.java      |   119 -
 .../apache/arrow/algorithm/rank/VectorRank.java    |    89 -
 .../arrow/algorithm/search/ParallelSearcher.java   |   190 -
 .../algorithm/search/VectorRangeSearcher.java      |   108 -
 .../arrow/algorithm/search/VectorSearcher.java     |    88 -
 .../algorithm/sort/CompositeVectorComparator.java  |    71 -
 .../algorithm/sort/DefaultVectorComparators.java   |   431 -
 .../sort/FixedWidthInPlaceVectorSorter.java        |   169 -
 .../sort/FixedWidthOutOfPlaceVectorSorter.java     |    80 -
 .../arrow/algorithm/sort/InPlaceVectorSorter.java  |    37 -
 .../apache/arrow/algorithm/sort/IndexSorter.java   |   180 -
 .../arrow/algorithm/sort/InsertionSorter.java      |    74 -
 .../arrow/algorithm/sort/OffHeapIntStack.java      |    72 -
 .../algorithm/sort/OutOfPlaceVectorSorter.java     |    37 -
 .../algorithm/sort/StableVectorComparator.java     |    66 -
 .../sort/VariableWidthOutOfPlaceVectorSorter.java  |    93 -
 .../algorithm/sort/VectorValueComparator.java      |   123 -
 .../deduplicate/TestDeduplicationUtils.java        |   135 -
 .../deduplicate/TestVectorRunDeduplicator.java     |   131 -
 .../TestHashTableBasedDictionaryBuilder.java       |   202 -
 .../dictionary/TestHashTableDictionaryEncoder.java |   350 -
 .../dictionary/TestLinearDictionaryEncoder.java    |   350 -
 .../dictionary/TestSearchDictionaryEncoder.java    |   357 -
 .../TestSearchTreeBasedDictionaryBuilder.java      |   221 -
 .../arrow/algorithm/misc/TestPartialSumUtils.java  |   138 -
 .../arrow/algorithm/rank/TestVectorRank.java       |   145 -
 .../algorithm/search/TestParallelSearcher.java     |   150 -
 .../algorithm/search/TestVectorRangeSearcher.java  |   195 -
 .../arrow/algorithm/search/TestVectorSearcher.java |   299 -
 .../sort/TestCompositeVectorComparator.java        |   112 -
 .../sort/TestDefaultVectorComparator.java          |   393 -
 .../sort/TestFixedWidthInPlaceVectorSorter.java    |   240 -
 .../sort/TestFixedWidthOutOfPlaceVectorSorter.java |   365 -
 .../algorithm/sort/TestFixedWidthSorting.java      |   172 -
 .../arrow/algorithm/sort/TestIndexSorter.java      |   205 -
 .../arrow/algorithm/sort/TestInsertionSorter.java  |   117 -
 .../arrow/algorithm/sort/TestOffHeapIntStack.java  |    67 -
 .../arrow/algorithm/sort/TestSortingUtil.java      |   166 -
 .../algorithm/sort/TestStableVectorComparator.java |   137 -
 .../TestVariableWidthOutOfPlaceVectorSorter.java   |    99 -
 .../algorithm/sort/TestVariableWidthSorting.java   |   165 -
 java/api-changes.md                                |    32 -
 java/compression/pom.xml                           |    52 -
 .../compression/CommonsCompressionFactory.java     |    43 -
 .../arrow/compression/Lz4CompressionCodec.java     |    89 -
 .../arrow/compression/ZstdCompressionCodec.java    |    74 -
 .../arrow/compression/TestCompressionCodec.java    |   213 -
 java/dataset/CMakeLists.txt                        |    43 -
 java/dataset/pom.xml                               |   134 -
 .../org/apache/arrow/dataset/file/FileFormat.java  |    36 -
 .../dataset/file/FileSystemDatasetFactory.java     |    38 -
 .../org/apache/arrow/dataset/file/JniWrapper.java  |    47 -
 .../dataset/jni/DirectReservationListener.java     |    97 -
 .../org/apache/arrow/dataset/jni/JniLoader.java    |    94 -
 .../org/apache/arrow/dataset/jni/JniWrapper.java   |   105 -
 .../apache/arrow/dataset/jni/NativeContext.java    |    53 -
 .../apache/arrow/dataset/jni/NativeDataset.java    |    56 -
 .../arrow/dataset/jni/NativeDatasetFactory.java    |   104 -
 .../jni/NativeInstanceReleasedException.java       |    31 -
 .../apache/arrow/dataset/jni/NativeMemoryPool.java |    76 -
 .../arrow/dataset/jni/NativeRecordBatchHandle.java |   106 -
 .../apache/arrow/dataset/jni/NativeScanTask.java   |    46 -
 .../apache/arrow/dataset/jni/NativeScanner.java    |   170 -
 .../arrow/dataset/jni/ReservationListener.java     |    36 -
 .../apache/arrow/dataset/scanner/ScanOptions.java  |    44 -
 .../org/apache/arrow/dataset/scanner/ScanTask.java |    42 -
 .../org/apache/arrow/dataset/scanner/Scanner.java  |    41 -
 .../org/apache/arrow/dataset/source/Dataset.java   |    35 -
 .../arrow/dataset/source/DatasetFactory.java       |    51 -
 .../arrow/memory/NativeUnderlyingMemory.java       |    81 -
 .../apache/arrow/dataset/ParquetWriteSupport.java  |   123 -
 .../java/org/apache/arrow/dataset/TestDataset.java |    97 -
 .../arrow/dataset/file/TestFileSystemDataset.java  |   292 -
 .../dataset/file/TestFileSystemDatasetFactory.java |    48 -
 .../arrow/dataset/jni/TestNativeDataset.java       |    33 -
 .../arrow/dataset/jni/TestReservationListener.java |    88 -
 .../arrow/memory/TestNativeUnderlyingMemory.java   |   110 -
 .../src/test/resources/avroschema/user.avsc        |    26 -
 java/dev/checkstyle/checkstyle.license             |    16 -
 java/dev/checkstyle/checkstyle.xml                 |   280 -
 java/dev/checkstyle/suppressions.xml               |    42 -
 java/flight/flight-core/README.md                  |    95 -
 java/flight/flight-core/pom.xml                    |   392 -
 .../main/java/org/apache/arrow/flight/Action.java  |    61 -
 .../java/org/apache/arrow/flight/ActionType.java   |    70 -
 .../java/org/apache/arrow/flight/ArrowMessage.java |   560 -
 .../org/apache/arrow/flight/AsyncPutListener.java  |    72 -
 .../apache/arrow/flight/BackpressureStrategy.java  |   172 -
 .../java/org/apache/arrow/flight/CallHeaders.java  |    65 -
 .../java/org/apache/arrow/flight/CallInfo.java     |    33 -
 .../java/org/apache/arrow/flight/CallOption.java   |    24 -
 .../java/org/apache/arrow/flight/CallOptions.java  |    62 -
 .../java/org/apache/arrow/flight/CallStatus.java   |   143 -
 .../java/org/apache/arrow/flight/Criteria.java     |    58 -
 .../org/apache/arrow/flight/DictionaryUtils.java   |   127 -
 .../apache/arrow/flight/ErrorFlightMetadata.java   |    81 -
 .../apache/arrow/flight/FlightBindingService.java  |   174 -
 .../org/apache/arrow/flight/FlightCallHeaders.java |   111 -
 .../java/org/apache/arrow/flight/FlightClient.java |   721 -
 .../arrow/flight/FlightClientMiddleware.java       |    52 -
 .../org/apache/arrow/flight/FlightConstants.java   |    29 -
 .../org/apache/arrow/flight/FlightDescriptor.java  |   180 -
 .../org/apache/arrow/flight/FlightEndpoint.java    |   106 -
 .../java/org/apache/arrow/flight/FlightInfo.java   |   208 -
 .../java/org/apache/arrow/flight/FlightMethod.java |    64 -
 .../org/apache/arrow/flight/FlightProducer.java    |   164 -
 .../arrow/flight/FlightRuntimeException.java       |    46 -
 .../java/org/apache/arrow/flight/FlightServer.java |   399 -
 .../arrow/flight/FlightServerMiddleware.java       |   100 -
 .../org/apache/arrow/flight/FlightService.java     |   427 -
 .../org/apache/arrow/flight/FlightStatusCode.java  |    82 -
 .../java/org/apache/arrow/flight/FlightStream.java |   497 -
 .../org/apache/arrow/flight/HeaderCallOption.java  |    52 -
 .../java/org/apache/arrow/flight/Location.java     |   158 -
 .../org/apache/arrow/flight/LocationSchemes.java   |    32 -
 .../apache/arrow/flight/NoOpFlightProducer.java    |    61 -
 .../apache/arrow/flight/NoOpStreamListener.java    |    49 -
 .../arrow/flight/OutboundStreamListener.java       |   123 -
 .../arrow/flight/OutboundStreamListenerImpl.java   |   132 -
 .../java/org/apache/arrow/flight/PutResult.java    |    96 -
 .../org/apache/arrow/flight/RequestContext.java    |    51 -
 .../main/java/org/apache/arrow/flight/Result.java  |    50 -
 .../java/org/apache/arrow/flight/SchemaResult.java |    96 -
 .../arrow/flight/ServerHeaderMiddleware.java       |    65 -
 .../java/org/apache/arrow/flight/StreamPipe.java   |   118 -
 .../org/apache/arrow/flight/SyncPutListener.java   |   122 -
 .../main/java/org/apache/arrow/flight/Ticket.java  |   102 -
 .../apache/arrow/flight/auth/AuthConstants.java    |    51 -
 .../arrow/flight/auth/BasicClientAuthHandler.java  |    58 -
 .../arrow/flight/auth/BasicServerAuthHandler.java  |    74 -
 .../arrow/flight/auth/ClientAuthHandler.java       |    55 -
 .../arrow/flight/auth/ClientAuthInterceptor.java   |    73 -
 .../arrow/flight/auth/ClientAuthWrapper.java       |   162 -
 .../arrow/flight/auth/ServerAuthHandler.java       |    72 -
 .../arrow/flight/auth/ServerAuthInterceptor.java   |    85 -
 .../arrow/flight/auth/ServerAuthWrapper.java       |   144 -
 .../apache/arrow/flight/auth2/Auth2Constants.java  |    31 -
 .../apache/arrow/flight/auth2/AuthUtilities.java   |    47 -
 .../flight/auth2/BasicAuthCredentialWriter.java    |    44 -
 .../flight/auth2/BasicCallHeaderAuthenticator.java |    88 -
 .../arrow/flight/auth2/BearerCredentialWriter.java |    39 -
 .../flight/auth2/BearerTokenAuthenticator.java     |    62 -
 .../flight/auth2/CallHeaderAuthenticator.java      |    86 -
 .../flight/auth2/ClientBearerHeaderHandler.java    |    36 -
 .../arrow/flight/auth2/ClientHandshakeWrapper.java |   100 -
 .../arrow/flight/auth2/ClientHeaderHandler.java    |    43 -
 .../auth2/ClientIncomingAuthHeaderMiddleware.java  |    78 -
 .../auth2/GeneratedBearerTokenAuthenticator.java   |   128 -
 .../auth2/ServerCallHeaderAuthMiddleware.java      |    74 -
 .../flight/client/ClientCookieMiddleware.java      |   130 -
 .../arrow/flight/example/ExampleFlightServer.java  |    93 -
 .../apache/arrow/flight/example/ExampleTicket.java |   141 -
 .../apache/arrow/flight/example/FlightHolder.java  |   131 -
 .../apache/arrow/flight/example/InMemoryStore.java |   176 -
 .../org/apache/arrow/flight/example/Stream.java    |   177 -
 .../integration/AuthBasicProtoScenario.java        |    97 -
 .../example/integration/IntegrationAssertions.java |    74 -
 .../example/integration/IntegrationTestClient.java |   197 -
 .../example/integration/IntegrationTestServer.java |    97 -
 .../example/integration/MiddlewareScenario.java    |   168 -
 .../arrow/flight/example/integration/Scenario.java |    45 -
 .../flight/example/integration/Scenarios.java      |    90 -
 .../arrow/flight/grpc/AddWritableBuffer.java       |   128 -
 .../arrow/flight/grpc/CallCredentialAdapter.java   |    53 -
 .../flight/grpc/ClientInterceptorAdapter.java      |   149 -
 .../grpc/ContextPropagatingExecutorService.java    |   117 -
 .../arrow/flight/grpc/CredentialCallOption.java    |    41 -
 .../arrow/flight/grpc/GetReadableBuffer.java       |    99 -
 .../apache/arrow/flight/grpc/MetadataAdapter.java  |    98 -
 .../arrow/flight/grpc/RequestContextAdapter.java   |    57 -
 .../flight/grpc/ServerInterceptorAdapter.java      |   145 -
 .../org/apache/arrow/flight/grpc/StatusUtils.java  |   230 -
 .../org/apache/arrow/flight/FlightTestUtil.java    |   150 -
 .../arrow/flight/TestApplicationMetadata.java      |   329 -
 .../java/org/apache/arrow/flight/TestAuth.java     |    93 -
 .../org/apache/arrow/flight/TestBackPressure.java  |   262 -
 .../apache/arrow/flight/TestBasicOperation.java    |   567 -
 .../org/apache/arrow/flight/TestCallOptions.java   |   191 -
 .../apache/arrow/flight/TestClientMiddleware.java  |   359 -
 .../apache/arrow/flight/TestDictionaryUtils.java   |    91 -
 .../org/apache/arrow/flight/TestDoExchange.java    |   536 -
 .../org/apache/arrow/flight/TestErrorMetadata.java |    92 -
 .../org/apache/arrow/flight/TestFlightClient.java  |   225 -
 .../org/apache/arrow/flight/TestFlightService.java |   125 -
 .../org/apache/arrow/flight/TestLargeMessage.java  |   165 -
 .../java/org/apache/arrow/flight/TestLeak.java     |   182 -
 .../apache/arrow/flight/TestMetadataVersion.java   |   319 -
 .../apache/arrow/flight/TestServerMiddleware.java  |   360 -
 .../org/apache/arrow/flight/TestServerOptions.java |   176 -
 .../test/java/org/apache/arrow/flight/TestTls.java |   145 -
 .../apache/arrow/flight/auth/TestBasicAuth.java    |   158 -
 .../apache/arrow/flight/auth2/TestBasicAuth2.java  |   232 -
 .../arrow/flight/client/TestCookieHandling.java    |   267 -
 .../arrow/flight/example/TestExampleServer.java    |   117 -
 .../arrow/flight/perf/PerformanceTestServer.java   |   216 -
 .../org/apache/arrow/flight/perf/TestPerf.java     |   199 -
 .../flight-core/src/test/protobuf/perf.proto       |    45 -
 .../flight-core/src/test/resources/logback.xml     |    28 -
 java/flight/flight-grpc/pom.xml                    |   132 -
 .../org/apache/arrow/flight/FlightGrpcUtils.java   |   161 -
 .../apache/arrow/flight/TestFlightGrpcUtils.java   |   193 -
 .../flight-grpc/src/test/protobuf/test.proto       |    26 -
 java/format/pom.xml                                |    46 -
 .../main/java/org/apache/arrow/flatbuf/Binary.java |    51 -
 .../main/java/org/apache/arrow/flatbuf/Block.java  |    61 -
 .../org/apache/arrow/flatbuf/BodyCompression.java  |    72 -
 .../arrow/flatbuf/BodyCompressionMethod.java       |    43 -
 .../main/java/org/apache/arrow/flatbuf/Bool.java   |    48 -
 .../main/java/org/apache/arrow/flatbuf/Buffer.java |    63 -
 .../org/apache/arrow/flatbuf/CompressionType.java  |    30 -
 .../main/java/org/apache/arrow/flatbuf/Date.java   |    65 -
 .../java/org/apache/arrow/flatbuf/DateUnit.java    |    30 -
 .../java/org/apache/arrow/flatbuf/Decimal.java     |    81 -
 .../org/apache/arrow/flatbuf/DictionaryBatch.java  |    79 -
 .../apache/arrow/flatbuf/DictionaryEncoding.java   |    88 -
 .../org/apache/arrow/flatbuf/DictionaryKind.java   |    36 -
 .../java/org/apache/arrow/flatbuf/Duration.java    |    57 -
 .../java/org/apache/arrow/flatbuf/Endianness.java  |    34 -
 .../java/org/apache/arrow/flatbuf/Feature.java     |    62 -
 .../main/java/org/apache/arrow/flatbuf/Field.java  |   120 -
 .../java/org/apache/arrow/flatbuf/FieldNode.java   |    68 -
 .../org/apache/arrow/flatbuf/FixedSizeBinary.java  |    60 -
 .../org/apache/arrow/flatbuf/FixedSizeList.java    |    60 -
 .../org/apache/arrow/flatbuf/FloatingPoint.java    |    57 -
 .../main/java/org/apache/arrow/flatbuf/Footer.java |   100 -
 .../main/java/org/apache/arrow/flatbuf/Int.java    |    61 -
 .../java/org/apache/arrow/flatbuf/Interval.java    |    57 -
 .../org/apache/arrow/flatbuf/IntervalUnit.java     |    30 -
 .../java/org/apache/arrow/flatbuf/KeyValue.java    |    70 -
 .../java/org/apache/arrow/flatbuf/LargeBinary.java |    52 -
 .../java/org/apache/arrow/flatbuf/LargeList.java   |    52 -
 .../java/org/apache/arrow/flatbuf/LargeUtf8.java   |    52 -
 .../main/java/org/apache/arrow/flatbuf/List.java   |    48 -
 .../main/java/org/apache/arrow/flatbuf/Map.java    |    87 -
 .../java/org/apache/arrow/flatbuf/Message.java     |    81 -
 .../org/apache/arrow/flatbuf/MessageHeader.java    |    44 -
 .../org/apache/arrow/flatbuf/MetadataVersion.java  |    54 -
 .../main/java/org/apache/arrow/flatbuf/Null.java   |    51 -
 .../java/org/apache/arrow/flatbuf/Precision.java   |    31 -
 .../java/org/apache/arrow/flatbuf/RecordBatch.java |   103 -
 .../main/java/org/apache/arrow/flatbuf/Schema.java |   102 -
 .../arrow/flatbuf/SparseMatrixCompressedAxis.java  |    30 -
 .../apache/arrow/flatbuf/SparseMatrixIndexCSX.java |   114 -
 .../org/apache/arrow/flatbuf/SparseTensor.java     |    92 -
 .../apache/arrow/flatbuf/SparseTensorIndex.java    |    32 -
 .../apache/arrow/flatbuf/SparseTensorIndexCOO.java |   118 -
 .../apache/arrow/flatbuf/SparseTensorIndexCSF.java |   173 -
 .../java/org/apache/arrow/flatbuf/Struct_.java     |    53 -
 .../main/java/org/apache/arrow/flatbuf/Tensor.java |    91 -
 .../java/org/apache/arrow/flatbuf/TensorDim.java   |    74 -
 .../main/java/org/apache/arrow/flatbuf/Time.java   |    66 -
 .../java/org/apache/arrow/flatbuf/TimeUnit.java    |    32 -
 .../java/org/apache/arrow/flatbuf/Timestamp.java   |    93 -
 .../main/java/org/apache/arrow/flatbuf/Type.java   |    55 -
 .../main/java/org/apache/arrow/flatbuf/Union.java  |    74 -
 .../java/org/apache/arrow/flatbuf/UnionMode.java   |    30 -
 .../main/java/org/apache/arrow/flatbuf/Utf8.java   |    51 -
 java/gandiva/CMakeLists.txt                        |    62 -
 java/gandiva/README.md                             |    32 -
 java/gandiva/pom.xml                               |   155 -
 .../gandiva/evaluator/ConfigurationBuilder.java    |    72 -
 .../arrow/gandiva/evaluator/DecimalTypeUtil.java   |    94 -
 .../gandiva/evaluator/ExpressionRegistry.java      |   220 -
 .../evaluator/ExpressionRegistryJniHelper.java     |    29 -
 .../org/apache/arrow/gandiva/evaluator/Filter.java |   199 -
 .../arrow/gandiva/evaluator/FunctionSignature.java |    93 -
 .../apache/arrow/gandiva/evaluator/JniLoader.java  |   170 -
 .../apache/arrow/gandiva/evaluator/JniWrapper.java |   120 -
 .../apache/arrow/gandiva/evaluator/Projector.java  |   364 -
 .../arrow/gandiva/evaluator/SelectionVector.java   |    87 -
 .../gandiva/evaluator/SelectionVectorInt16.java    |    49 -
 .../gandiva/evaluator/SelectionVectorInt32.java    |    48 -
 .../arrow/gandiva/evaluator/VectorExpander.java    |    69 -
 .../exceptions/EvaluatorClosedException.java       |    25 -
 .../arrow/gandiva/exceptions/GandivaException.java |    35 -
 .../exceptions/UnsupportedTypeException.java       |    27 -
 .../apache/arrow/gandiva/expression/AndNode.java   |    47 -
 .../arrow/gandiva/expression/ArrowTypeHelper.java  |   350 -
 .../arrow/gandiva/expression/BinaryNode.java       |    45 -
 .../arrow/gandiva/expression/BooleanNode.java      |    43 -
 .../apache/arrow/gandiva/expression/Condition.java |    42 -
 .../arrow/gandiva/expression/DecimalNode.java      |    49 -
 .../arrow/gandiva/expression/DoubleNode.java       |    43 -
 .../arrow/gandiva/expression/ExpressionTree.java   |    46 -
 .../apache/arrow/gandiva/expression/FieldNode.java |    43 -
 .../apache/arrow/gandiva/expression/FloatNode.java |    43 -
 .../arrow/gandiva/expression/FunctionNode.java     |    54 -
 .../apache/arrow/gandiva/expression/IfNode.java    |    52 -
 .../apache/arrow/gandiva/expression/InNode.java    |   121 -
 .../apache/arrow/gandiva/expression/IntNode.java   |    43 -
 .../apache/arrow/gandiva/expression/LongNode.java  |    43 -
 .../apache/arrow/gandiva/expression/NullNode.java  |    41 -
 .../apache/arrow/gandiva/expression/OrNode.java    |    47 -
 .../arrow/gandiva/expression/StringNode.java       |    48 -
 .../arrow/gandiva/expression/TreeBuilder.java      |   220 -
 .../apache/arrow/gandiva/expression/TreeNode.java  |    34 -
 .../arrow/gandiva/evaluator/BaseEvaluatorTest.java |   404 -
 .../gandiva/evaluator/DecimalTypeUtilTest.java     |    89 -
 .../gandiva/evaluator/ExpressionRegistryTest.java  |    65 -
 .../arrow/gandiva/evaluator/FilterProjectTest.java |   102 -
 .../apache/arrow/gandiva/evaluator/FilterTest.java |   315 -
 .../gandiva/evaluator/MicroBenchmarkTest.java      |   151 -
 .../gandiva/evaluator/ProjectorDecimalTest.java    |   797 -
 .../arrow/gandiva/evaluator/ProjectorTest.java     |  2237 --
 .../arrow/gandiva/evaluator/TestJniLoader.java     |    53 -
 .../gandiva/expression/ArrowTypeHelperTest.java    |   105 -
 .../arrow/gandiva/expression/TreeBuilderTest.java  |   350 -
 java/gandiva/src/test/resources/logback.xml        |    28 -
 java/memory/memory-core/pom.xml                    |    58 -
 .../java/org/apache/arrow/memory/Accountant.java   |   308 -
 .../apache/arrow/memory/AllocationListener.java    |    85 -
 .../org/apache/arrow/memory/AllocationManager.java |   221 -
 .../org/apache/arrow/memory/AllocationOutcome.java |    97 -
 .../arrow/memory/AllocationOutcomeDetails.java     |   132 -
 .../apache/arrow/memory/AllocationReservation.java |    88 -
 .../arrow/memory/AllocatorClosedException.java     |    35 -
 .../java/org/apache/arrow/memory/ArrowBuf.java     |  1245 -
 .../org/apache/arrow/memory/BaseAllocator.java     |   946 -
 .../org/apache/arrow/memory/BoundsChecking.java    |    63 -
 .../org/apache/arrow/memory/BufferAllocator.java   |   228 -
 .../java/org/apache/arrow/memory/BufferLedger.java |   525 -
 .../org/apache/arrow/memory/BufferManager.java     |    53 -
 .../org/apache/arrow/memory/CheckAllocator.java    |    87 -
 .../org/apache/arrow/memory/ChildAllocator.java    |    44 -
 .../memory/DefaultAllocationManagerOption.java     |   133 -
 .../arrow/memory/LowCostIdentityHashMap.java       |   336 -
 .../apache/arrow/memory/OutOfMemoryException.java  |    67 -
 .../apache/arrow/memory/OwnershipTransferNOOP.java |    39 -
 .../arrow/memory/OwnershipTransferResult.java      |    28 -
 .../main/java/org/apache/arrow/memory/README.md    |   121 -
 .../org/apache/arrow/memory/ReferenceManager.java  |   175 -
 .../org/apache/arrow/memory/RootAllocator.java     |    71 -
 .../apache/arrow/memory/ValueWithKeyIncluded.java  |    28 -
 .../java/org/apache/arrow/memory/package-info.java |    26 -
 .../memory/rounding/DefaultRoundingPolicy.java     |   114 -
 .../arrow/memory/rounding/RoundingPolicy.java      |    26 -
 .../memory/rounding/SegmentRoundingPolicy.java     |    60 -
 .../apache/arrow/memory/util/ArrowBufPointer.java  |   187 -
 .../apache/arrow/memory/util/AssertionUtil.java    |    40 -
 .../arrow/memory/util/AutoCloseableLock.java       |    43 -
 .../arrow/memory/util/ByteFunctionHelpers.java     |   347 -
 .../org/apache/arrow/memory/util/CommonUtil.java   |    79 -
 .../apache/arrow/memory/util/HistoricalLog.java    |   178 -
 .../apache/arrow/memory/util/LargeMemoryUtil.java  |    44 -
 .../org/apache/arrow/memory/util/MemoryUtil.java   |   170 -
 .../org/apache/arrow/memory/util/StackTrace.java   |    70 -
 .../arrow/memory/util/hash/ArrowBufHasher.java     |    47 -
 .../arrow/memory/util/hash/MurmurHasher.java       |   175 -
 .../arrow/memory/util/hash/SimpleHasher.java       |   116 -
 .../java/org/apache/arrow/util/AutoCloseables.java |   227 -
 .../java/org/apache/arrow/util/Collections2.java   |    68 -
 .../java/org/apache/arrow/util/Preconditions.java  |  1323 -
 .../org/apache/arrow/util/VisibleForTesting.java   |    26 -
 .../memory/DefaultAllocationManagerFactory.java    |    63 -
 .../org/apache/arrow/memory/TestAccountant.java    |   172 -
 .../apache/arrow/memory/TestAllocationManager.java |    39 -
 .../java/org/apache/arrow/memory/TestArrowBuf.java |   149 -
 .../apache/arrow/memory/TestBoundaryChecking.java  |   150 -
 .../arrow/memory/TestLowCostIdentityHashMap.java   |   169 -
 .../arrow/memory/util/TestArrowBufPointer.java     |   216 -
 .../arrow/memory/util/TestByteFunctionHelpers.java |   167 -
 .../arrow/memory/util/TestLargeMemoryUtil.java     |   105 -
 .../arrow/memory/util/hash/TestArrowBufHasher.java |   123 -
 java/memory/memory-netty/pom.xml                   |   101 -
 .../java/io/netty/buffer/ExpandableByteBuf.java    |    56 -
 .../src/main/java/io/netty/buffer/LargeBuffer.java |    34 -
 .../io/netty/buffer/MutableWrappedByteBuf.java     |   448 -
 .../main/java/io/netty/buffer/NettyArrowBuf.java   |   622 -
 .../io/netty/buffer/PooledByteBufAllocatorL.java   |   280 -
 .../io/netty/buffer/UnsafeDirectLittleEndian.java  |   270 -
 .../apache/arrow/memory/ArrowByteBufAllocator.java |   161 -
 .../memory/DefaultAllocationManagerFactory.java    |    38 -
 .../arrow/memory/NettyAllocationManager.java       |   123 -
 .../java/io/netty/buffer/TestNettyArrowBuf.java    |   141 -
 .../netty/buffer/TestUnsafeDirectLittleEndian.java |    77 -
 .../apache/arrow/memory/ITTestLargeArrowBuf.java   |    72 -
 .../arrow/memory/TestAllocationManagerNetty.java   |    39 -
 .../org/apache/arrow/memory/TestBaseAllocator.java |  1183 -
 .../org/apache/arrow/memory/TestEmptyArrowBuf.java |    88 -
 .../org/apache/arrow/memory/TestEndianness.java    |    51 -
 .../arrow/memory/TestNettyAllocationManager.java   |   108 -
 .../memory-netty/src/test/resources/logback.xml    |    28 -
 java/memory/memory-unsafe/pom.xml                  |    52 -
 .../memory/DefaultAllocationManagerFactory.java    |    37 -
 .../arrow/memory/UnsafeAllocationManager.java      |    70 -
 .../arrow/memory/TestAllocationManagerUnsafe.java  |    41 -
 .../arrow/memory/TestUnsafeAllocationManager.java  |    68 -
 java/memory/pom.xml                                |    29 -
 .../io/netty/buffer/TestExpandableByteBuf.java     |   117 -
 java/performance/pom.xml                           |   222 -
 .../arrow/adapter/AvroAdapterBenchmarks.java       |   141 -
 .../arrow/adapter/jdbc/JdbcAdapterBenchmarks.java  |   359 -
 .../search/ParallelSearcherBenchmarks.java         |   115 -
 .../apache/arrow/memory/AllocatorBenchmarks.java   |    95 -
 .../apache/arrow/memory/ArrowBufBenchmarks.java    |    82 -
 .../memory/util/ArrowBufPointerBenchmarks.java     |   107 -
 .../memory/util/ByteFunctionHelpersBenchmarks.java |   138 -
 .../arrow/vector/BaseValueVectorBenchmarks.java    |    95 -
 .../arrow/vector/BitVectorHelperBenchmarks.java    |   229 -
 .../arrow/vector/DecimalVectorBenchmarks.java      |   121 -
 .../org/apache/arrow/vector/Float8Benchmarks.java  |   122 -
 .../arrow/vector/FloatingPointBenchmarks.java      |   134 -
 .../org/apache/arrow/vector/IntBenchmarks.java     |   110 -
 .../org/apache/arrow/vector/VarCharBenchmarks.java |   102 -
 .../vector/VariableWidthVectorBenchmarks.java      |   130 -
 .../apache/arrow/vector/VectorLoaderBenchmark.java |   117 -
 .../arrow/vector/VectorUnloaderBenchmark.java      |   109 -
 .../dictionary/DictionaryEncoderBenchmarks.java    |   147 -
 .../arrow/vector/ipc/WriteChannelBenchmark.java    |    87 -
 .../ipc/message/ArrowRecordBatchBenchmarks.java    |    98 -
 .../arrow/vector/util/TransferPairBenchmarks.java  |   123 -
 java/plasma/README.md                              |    39 -
 java/plasma/pom.xml                                |    34 -
 .../org/apache/arrow/plasma/ObjectStoreLink.java   |   131 -
 .../java/org/apache/arrow/plasma/PlasmaClient.java |   184 -
 .../org/apache/arrow/plasma/PlasmaClientJNI.java   |    57 -
 .../exceptions/DuplicateObjectException.java       |    32 -
 .../plasma/exceptions/PlasmaClientException.java   |    32 -
 .../exceptions/PlasmaOutOfMemoryException.java     |    40 -
 .../org/apache/arrow/plasma/PlasmaClientTest.java  |   313 -
 java/plasma/src/test/resources/logback.xml         |    28 -
 java/plasma/test.sh                                |    56 -
 java/pom.xml                                       |   764 -
 java/tools/pom.xml                                 |   106 -
 .../java/org/apache/arrow/tools/EchoServer.java    |   146 -
 .../java/org/apache/arrow/tools/FileRoundtrip.java |   123 -
 .../java/org/apache/arrow/tools/FileToStream.java  |    78 -
 .../java/org/apache/arrow/tools/Integration.java   |   244 -
 .../java/org/apache/arrow/tools/StreamToFile.java  |    76 -
 .../apache/arrow/tools/ArrowFileTestFixtures.java  |   105 -
 .../org/apache/arrow/tools/EchoServerTest.java     |   301 -
 .../org/apache/arrow/tools/TestFileRoundtrip.java  |    65 -
 .../org/apache/arrow/tools/TestIntegration.java    |   288 -
 java/tools/src/test/resources/logback.xml          |    27 -
 java/vector/pom.xml                                |   291 -
 java/vector/src/main/codegen/config.fmpp           |    24 -
 java/vector/src/main/codegen/data/ArrowTypes.tdd   |   124 -
 .../src/main/codegen/data/ValueVectorTypes.tdd     |   196 -
 java/vector/src/main/codegen/includes/license.ftl  |    16 -
 .../src/main/codegen/includes/vv_imports.ftl       |    60 -
 .../codegen/templates/AbstractFieldReader.java     |   128 -
 .../codegen/templates/AbstractFieldWriter.java     |   177 -
 .../templates/AbstractPromotableFieldWriter.java   |   189 -
 .../src/main/codegen/templates/ArrowType.java      |   375 -
 .../src/main/codegen/templates/BaseReader.java     |    75 -
 .../src/main/codegen/templates/BaseWriter.java     |   116 -
 .../templates/CaseSensitiveStructWriters.java      |    53 -
 .../src/main/codegen/templates/ComplexCopier.java  |   191 -
 .../src/main/codegen/templates/ComplexReaders.java |   147 -
 .../src/main/codegen/templates/ComplexWriters.java |   211 -
 .../main/codegen/templates/DenseUnionReader.java   |   217 -
 .../main/codegen/templates/DenseUnionVector.java   |   896 -
 .../main/codegen/templates/DenseUnionWriter.java   |   262 -
 .../main/codegen/templates/HolderReaderImpl.java   |   170 -
 .../src/main/codegen/templates/NullReader.java     |   147 -
 .../src/main/codegen/templates/StructWriters.java  |   286 -
 .../templates/UnionFixedSizeListWriter.java        |   296 -
 .../main/codegen/templates/UnionListWriter.java    |   303 -
 .../src/main/codegen/templates/UnionMapWriter.java |   210 -
 .../src/main/codegen/templates/UnionReader.java    |   210 -
 .../src/main/codegen/templates/UnionVector.java    |   800 -
 .../src/main/codegen/templates/UnionWriter.java    |   278 -
 .../src/main/codegen/templates/ValueHolders.java   |    79 -
 .../org/apache/arrow/vector/AddOrGetResult.java    |    46 -
 .../org/apache/arrow/vector/AllocationHelper.java  |    95 -
 .../apache/arrow/vector/BaseFixedWidthVector.java  |   923 -
 .../org/apache/arrow/vector/BaseIntVector.java     |    43 -
 .../arrow/vector/BaseLargeVariableWidthVector.java |  1363 -
 .../org/apache/arrow/vector/BaseValueVector.java   |   232 -
 .../arrow/vector/BaseVariableWidthVector.java      |  1414 -
 .../java/org/apache/arrow/vector/BigIntVector.java |   358 -
 .../java/org/apache/arrow/vector/BitVector.java    |   599 -
 .../org/apache/arrow/vector/BitVectorHelper.java   |   459 -
 .../java/org/apache/arrow/vector/BufferBacked.java |    31 -
 .../java/org/apache/arrow/vector/BufferLayout.java |   153 -
 .../org/apache/arrow/vector/DateDayVector.java     |   347 -
 .../org/apache/arrow/vector/DateMilliVector.java   |   350 -
 .../org/apache/arrow/vector/Decimal256Vector.java  |   584 -
 .../org/apache/arrow/vector/DecimalVector.java     |   584 -
 .../apache/arrow/vector/DensityAwareVector.java    |    57 -
 .../org/apache/arrow/vector/DurationVector.java    |   406 -
 .../arrow/vector/ElementAddressableVector.java     |    42 -
 .../apache/arrow/vector/ExtensionTypeVector.java   |   266 -
 .../java/org/apache/arrow/vector/FieldVector.java  |    90 -
 .../apache/arrow/vector/FixedSizeBinaryVector.java |   386 -
 .../org/apache/arrow/vector/FixedWidthVector.java  |    36 -
 .../java/org/apache/arrow/vector/Float4Vector.java |   361 -
 .../java/org/apache/arrow/vector/Float8Vector.java |   362 -
 .../apache/arrow/vector/FloatingPointVector.java   |    46 -
 .../apache/arrow/vector/GenerateSampleData.java    |   337 -
 .../java/org/apache/arrow/vector/IntVector.java    |   362 -
 .../org/apache/arrow/vector/IntervalDayVector.java |   433 -
 .../apache/arrow/vector/IntervalYearVector.java    |   382 -
 .../apache/arrow/vector/LargeVarBinaryVector.java  |   305 -
 .../apache/arrow/vector/LargeVarCharVector.java    |   331 -
 .../apache/arrow/vector/NullCheckingForGet.java    |    84 -
 .../java/org/apache/arrow/vector/NullVector.java   |   292 -
 .../apache/arrow/vector/SchemaChangeCallBack.java  |    57 -
 .../org/apache/arrow/vector/SmallIntVector.java    |   389 -
 .../org/apache/arrow/vector/TimeMicroVector.java   |   347 -
 .../org/apache/arrow/vector/TimeMilliVector.java   |   351 -
 .../org/apache/arrow/vector/TimeNanoVector.java    |   347 -
 .../org/apache/arrow/vector/TimeSecVector.java     |   348 -
 .../arrow/vector/TimeStampMicroTZVector.java       |   239 -
 .../apache/arrow/vector/TimeStampMicroVector.java  |   236 -
 .../arrow/vector/TimeStampMilliTZVector.java       |   238 -
 .../apache/arrow/vector/TimeStampMilliVector.java  |   236 -
 .../apache/arrow/vector/TimeStampNanoTZVector.java |   241 -
 .../apache/arrow/vector/TimeStampNanoVector.java   |   236 -
 .../apache/arrow/vector/TimeStampSecTZVector.java  |   238 -
 .../apache/arrow/vector/TimeStampSecVector.java    |   237 -
 .../org/apache/arrow/vector/TimeStampVector.java   |   197 -
 .../org/apache/arrow/vector/TinyIntVector.java     |   390 -
 .../java/org/apache/arrow/vector/TypeLayout.java   |   446 -
 .../java/org/apache/arrow/vector/UInt1Vector.java  |   364 -
 .../java/org/apache/arrow/vector/UInt2Vector.java  |   339 -
 .../java/org/apache/arrow/vector/UInt4Vector.java  |   334 -
 .../java/org/apache/arrow/vector/UInt8Vector.java  |   330 -
 .../java/org/apache/arrow/vector/ValueVector.java  |   285 -
 .../org/apache/arrow/vector/VarBinaryVector.java   |   306 -
 .../org/apache/arrow/vector/VarCharVector.java     |   331 -
 .../apache/arrow/vector/VariableWidthVector.java   |    53 -
 .../arrow/vector/VectorDefinitionSetter.java       |    26 -
 .../java/org/apache/arrow/vector/VectorLoader.java |   137 -
 .../org/apache/arrow/vector/VectorSchemaRoot.java  |   429 -
 .../org/apache/arrow/vector/VectorUnloader.java    |   107 -
 .../java/org/apache/arrow/vector/ZeroVector.java   |   107 -
 .../arrow/vector/compare/ApproxEqualsVisitor.java  |   147 -
 .../org/apache/arrow/vector/compare/Range.java     |    85 -
 .../arrow/vector/compare/RangeEqualsVisitor.java   |   550 -
 .../arrow/vector/compare/TypeEqualsVisitor.java    |   148 -
 .../arrow/vector/compare/VectorEqualsVisitor.java  |    60 -
 .../arrow/vector/compare/VectorValueEqualizer.java |    44 -
 .../apache/arrow/vector/compare/VectorVisitor.java |    58 -
 .../compare/util/ValueEpsilonEqualizers.java       |   149 -
 .../vector/complex/AbstractContainerVector.java    |   140 -
 .../arrow/vector/complex/AbstractStructVector.java |   425 -
 .../arrow/vector/complex/BaseListVector.java       |    36 -
 .../vector/complex/BaseRepeatedValueVector.java    |   361 -
 .../arrow/vector/complex/EmptyValuePopulator.java  |    51 -
 .../arrow/vector/complex/FixedSizeListVector.java  |   683 -
 .../arrow/vector/complex/LargeListVector.java      |  1022 -
 .../apache/arrow/vector/complex/ListVector.java    |   893 -
 .../org/apache/arrow/vector/complex/MapVector.java |   122 -
 .../vector/complex/NonNullableStructVector.java    |   440 -
 .../apache/arrow/vector/complex/Positionable.java  |    29 -
 .../arrow/vector/complex/PromotableVector.java     |    32 -
 .../complex/RepeatedFixedWidthVectorLike.java      |    32 -
 .../arrow/vector/complex/RepeatedValueVector.java  |    46 -
 .../complex/RepeatedVariableWidthVectorLike.java   |    40 -
 .../org/apache/arrow/vector/complex/StateTool.java |    44 -
 .../apache/arrow/vector/complex/StructVector.java  |   600 -
 .../arrow/vector/complex/VectorWithOrdinal.java    |    34 -
 .../vector/complex/impl/AbstractBaseReader.java    |   112 -
 .../vector/complex/impl/AbstractBaseWriter.java    |    55 -
 .../vector/complex/impl/ComplexWriterImpl.java     |   227 -
 .../complex/impl/NullableStructReaderImpl.java     |    59 -
 .../complex/impl/NullableStructWriterFactory.java  |    48 -
 .../vector/complex/impl/PromotableWriter.java      |   397 -
 .../vector/complex/impl/SingleListReaderImpl.java  |    91 -
 .../complex/impl/SingleStructReaderImpl.java       |   113 -
 .../complex/impl/StructOrListWriterImpl.java       |   137 -
 .../complex/impl/UnionFixedSizeListReader.java     |   105 -
 .../vector/complex/impl/UnionLargeListReader.java  |   109 -
 .../arrow/vector/complex/impl/UnionListReader.java |   107 -
 .../arrow/vector/complex/impl/UnionMapReader.java  |    77 -
 .../arrow/vector/complex/reader/FieldReader.java   |    32 -
 .../arrow/vector/complex/writer/FieldWriter.java   |    32 -
 .../compression/AbstractCompressionCodec.java      |   116 -
 .../arrow/vector/compression/CompressionCodec.java |    62 -
 .../arrow/vector/compression/CompressionUtil.java  |   103 -
 .../vector/compression/NoCompressionCodec.java     |    67 -
 .../apache/arrow/vector/dictionary/Dictionary.java |    75 -
 .../arrow/vector/dictionary/DictionaryEncoder.java |   196 -
 .../vector/dictionary/DictionaryHashTable.java     |   295 -
 .../vector/dictionary/DictionaryProvider.java      |    62 -
 .../vector/dictionary/ListSubfieldEncoder.java     |   137 -
 .../vector/dictionary/StructSubfieldEncoder.java   |   196 -
 .../apache/arrow/vector/holders/ComplexHolder.java |    28 -
 .../arrow/vector/holders/DenseUnionHolder.java     |    38 -
 .../arrow/vector/holders/RepeatedListHolder.java   |    26 -
 .../arrow/vector/holders/RepeatedStructHolder.java |    26 -
 .../apache/arrow/vector/holders/UnionHolder.java   |    37 -
 .../apache/arrow/vector/holders/ValueHolder.java   |    31 -
 .../apache/arrow/vector/ipc/ArrowFileReader.java   |   230 -
 .../apache/arrow/vector/ipc/ArrowFileWriter.java   |   119 -
 .../org/apache/arrow/vector/ipc/ArrowMagic.java    |    44 -
 .../org/apache/arrow/vector/ipc/ArrowReader.java   |   255 -
 .../apache/arrow/vector/ipc/ArrowStreamReader.java |   229 -
 .../apache/arrow/vector/ipc/ArrowStreamWriter.java |    86 -
 .../org/apache/arrow/vector/ipc/ArrowWriter.java   |   210 -
 .../vector/ipc/InvalidArrowFileException.java      |    30 -
 .../apache/arrow/vector/ipc/JsonFileReader.java    |   783 -
 .../apache/arrow/vector/ipc/JsonFileWriter.java    |   409 -
 .../org/apache/arrow/vector/ipc/ReadChannel.java   |   102 -
 .../arrow/vector/ipc/SeekableReadChannel.java      |    43 -
 .../org/apache/arrow/vector/ipc/WriteChannel.java  |   162 -
 .../arrow/vector/ipc/message/ArrowBlock.java       |    95 -
 .../vector/ipc/message/ArrowBodyCompression.java   |    55 -
 .../arrow/vector/ipc/message/ArrowBuffer.java      |    90 -
 .../vector/ipc/message/ArrowDictionaryBatch.java   |    94 -
 .../arrow/vector/ipc/message/ArrowFieldNode.java   |    64 -
 .../arrow/vector/ipc/message/ArrowFooter.java      |   226 -
 .../arrow/vector/ipc/message/ArrowMessage.java     |    42 -
 .../arrow/vector/ipc/message/ArrowRecordBatch.java |   259 -
 .../arrow/vector/ipc/message/FBSerializable.java   |    30 -
 .../arrow/vector/ipc/message/FBSerializables.java  |    67 -
 .../apache/arrow/vector/ipc/message/IpcOption.java |    44 -
 .../vector/ipc/message/MessageChannelReader.java   |    91 -
 .../vector/ipc/message/MessageMetadataResult.java  |   115 -
 .../arrow/vector/ipc/message/MessageResult.java    |    61 -
 .../vector/ipc/message/MessageSerializer.java      |   731 -
 .../org/apache/arrow/vector/types/DateUnit.java    |    50 -
 .../arrow/vector/types/FloatingPointPrecision.java |    55 -
 .../apache/arrow/vector/types/IntervalUnit.java    |    50 -
 .../apache/arrow/vector/types/MetadataVersion.java |    65 -
 .../org/apache/arrow/vector/types/TimeUnit.java    |    50 -
 .../java/org/apache/arrow/vector/types/Types.java  |   998 -
 .../org/apache/arrow/vector/types/UnionMode.java   |    57 -
 .../vector/types/pojo/DictionaryEncoding.java      |    88 -
 .../vector/types/pojo/ExtensionTypeRegistry.java   |    42 -
 .../org/apache/arrow/vector/types/pojo/Field.java  |   325 -
 .../apache/arrow/vector/types/pojo/FieldType.java  |   124 -
 .../org/apache/arrow/vector/types/pojo/Schema.java |   244 -
 .../util/ByteArrayReadableSeekableByteChannel.java |    86 -
 .../arrow/vector/util/ByteFunctionHelpers.java     |   112 -
 .../org/apache/arrow/vector/util/CallBack.java     |    25 -
 .../arrow/vector/util/DataSizeRoundingUtil.java    |    99 -
 .../org/apache/arrow/vector/util/DateUtility.java  |   134 -
 .../apache/arrow/vector/util/DecimalUtility.java   |   188 -
 .../arrow/vector/util/DictionaryUtility.java       |   145 -
 .../util/ElementAddressableVectorIterator.java     |    86 -
 .../arrow/vector/util/JsonStringArrayList.java     |    55 -
 .../arrow/vector/util/JsonStringHashMap.java       |    48 -
 .../apache/arrow/vector/util/MapWithOrdinal.java   |    67 -
 .../arrow/vector/util/MapWithOrdinalImpl.java      |   248 -
 .../arrow/vector/util/MultiMapWithOrdinal.java     |   230 -
 .../vector/util/OversizedAllocationException.java  |    52 -
 .../vector/util/PromotableMultiMapWithOrdinal.java |   133 -
 .../vector/util/SchemaChangeRuntimeException.java  |    48 -
 .../apache/arrow/vector/util/SchemaUtility.java    |    63 -
 .../java/org/apache/arrow/vector/util/Text.java    |   688 -
 .../org/apache/arrow/vector/util/TransferPair.java |    33 -
 .../org/apache/arrow/vector/util/Validator.java    |   190 -
 .../arrow/vector/util/ValueVectorUtility.java      |   172 -
 .../apache/arrow/vector/util/VectorAppender.java   |   533 -
 .../arrow/vector/util/VectorBatchAppender.java     |    39 -
 .../vector/util/VectorSchemaRootAppender.java      |    83 -
 .../vector/validate/MetadataV4UnionChecker.java    |    82 -
 .../apache/arrow/vector/validate/ValidateUtil.java |    61 -
 .../validate/ValidateVectorBufferVisitor.java      |   239 -
 .../vector/validate/ValidateVectorDataVisitor.java |   173 -
 .../vector/validate/ValidateVectorTypeVisitor.java |   356 -
 .../vector/validate/ValidateVectorVisitor.java     |   266 -
 .../java/org/apache/arrow/util/TestSchemaUtil.java |    51 -
 .../apache/arrow/vector/DirtyRootAllocator.java    |    52 -
 .../org/apache/arrow/vector/ITTestLargeVector.java |   280 -
 .../org/apache/arrow/vector/TestBitVector.java     |   543 -
 .../apache/arrow/vector/TestBitVectorHelper.java   |   235 -
 .../arrow/vector/TestBufferOwnershipTransfer.java  |   131 -
 .../java/org/apache/arrow/vector/TestCopyFrom.java |  1104 -
 .../apache/arrow/vector/TestDecimal256Vector.java  |   357 -
 .../org/apache/arrow/vector/TestDecimalVector.java |   365 -
 .../apache/arrow/vector/TestDenseUnionVector.java  |   625 -
 .../apache/arrow/vector/TestDictionaryVector.java  |  1031 -
 .../apache/arrow/vector/TestDurationVector.java    |   137 -
 .../arrow/vector/TestFixedSizeBinaryVector.java    |   279 -
 .../arrow/vector/TestFixedSizeListVector.java      |   506 -
 .../arrow/vector/TestIntervalYearVector.java       |    58 -
 .../apache/arrow/vector/TestLargeListVector.java   |   982 -
 .../arrow/vector/TestLargeVarBinaryVector.java     |   104 -
 .../arrow/vector/TestLargeVarCharVector.java       |   816 -
 .../org/apache/arrow/vector/TestListVector.java    |   981 -
 .../org/apache/arrow/vector/TestMapVector.java     |   702 -
 .../arrow/vector/TestNullCheckingForGet.java       |    92 -
 .../vector/TestOutOfMemoryForValueVector.java      |    73 -
 .../TestOversizedAllocationForValueVector.java     |   132 -
 .../apache/arrow/vector/TestSplitAndTransfer.java  |   410 -
 .../org/apache/arrow/vector/TestStructVector.java  |   183 -
 .../org/apache/arrow/vector/TestTypeLayout.java    |    98 -
 .../org/apache/arrow/vector/TestUnionVector.java   |   433 -
 .../java/org/apache/arrow/vector/TestUtils.java    |    45 -
 .../org/apache/arrow/vector/TestValueVector.java   |  3023 --
 .../apache/arrow/vector/TestVarCharListVector.java |    77 -
 .../org/apache/arrow/vector/TestVectorAlloc.java   |   106 -
 .../org/apache/arrow/vector/TestVectorReAlloc.java |   474 -
 .../org/apache/arrow/vector/TestVectorReset.java   |   168 -
 .../apache/arrow/vector/TestVectorSchemaRoot.java  |   318 -
 .../apache/arrow/vector/TestVectorUnloadLoad.java  |   332 -
 .../vector/compare/TestRangeEqualsVisitor.java     |   739 -
 .../vector/compare/TestTypeEqualsVisitor.java      |   185 -
 .../vector/complex/impl/TestComplexCopier.java     |   763 -
 .../vector/complex/impl/TestPromotableWriter.java  |   167 -
 .../vector/complex/writer/TestComplexWriter.java   |  1258 -
 .../org/apache/arrow/vector/ipc/BaseFileTest.java  |   850 -
 .../vector/ipc/ITTestIPCWithLargeArrowBuffers.java |   187 -
 .../arrow/vector/ipc/MessageSerializerTest.java    |   227 -
 .../org/apache/arrow/vector/ipc/TestArrowFile.java |   134 -
 .../apache/arrow/vector/ipc/TestArrowFooter.java   |    68 -
 .../arrow/vector/ipc/TestArrowReaderWriter.java    |   882 -
 .../apache/arrow/vector/ipc/TestArrowStream.java   |   147 -
 .../arrow/vector/ipc/TestArrowStreamPipe.java      |   161 -
 .../org/apache/arrow/vector/ipc/TestJSONFile.java  |   458 -
 .../org/apache/arrow/vector/ipc/TestRoundTrip.java |   628 -
 .../vector/ipc/TestUIntDictionaryRoundTrip.java    |   246 -
 .../ipc/message/TestMessageMetadataResult.java     |    36 -
 .../org/apache/arrow/vector/pojo/TestConvert.java  |   169 -
 .../arrow/vector/testing/RandomDataGenerator.java  |    44 -
 .../vector/testing/TestValueVectorPopulator.java   |   604 -
 .../vector/testing/ValueVectorDataPopulator.java   |   708 -
 .../arrow/vector/types/pojo/TestExtensionType.java |   240 -
 .../apache/arrow/vector/types/pojo/TestField.java  |    63 -
 .../apache/arrow/vector/types/pojo/TestSchema.java |   254 -
 .../arrow/vector/util/DecimalUtilityTest.java      |   127 -
 .../vector/util/TestDataSizeRoundingUtil.java      |    76 -
 .../util/TestElementAddressableVectorIterator.java |   134 -
 .../arrow/vector/util/TestMultiMapWithOrdinal.java |    60 -
 .../apache/arrow/vector/util/TestValidator.java    |    56 -
 .../arrow/vector/util/TestVectorAppender.java      |   794 -
 .../arrow/vector/util/TestVectorBatchAppender.java |    72 -
 .../vector/util/TestVectorSchemaRootAppender.java  |   161 -
 .../arrow/vector/validate/TestValidateVector.java  |   260 -
 .../vector/validate/TestValidateVectorFull.java    |   234 -
 .../validate/TestValidateVectorSchemaRoot.java     |   101 -
 .../validate/TestValidateVectorTypeVisitor.java    |   301 -
 java/vector/src/test/resources/logback.xml         |    28 -
 js/.eslintignore                                   |     1 -
 js/.eslintrc.js                                    |    87 -
 js/.gitignore                                      |    91 -
 js/.npmrc                                          |     2 -
 js/.vscode/launch.json                             |   182 -
 js/DEVELOP.md                                      |   114 -
 js/README.md                                       |   280 -
 js/bin/arrow2csv.js                                |    28 -
 js/bin/file-to-stream.js                           |    40 -
 js/bin/integration.js                              |   255 -
 js/bin/json-to-arrow.js                            |   108 -
 js/bin/print-buffer-alignment.js                   |    81 -
 js/bin/stream-to-file.js                           |    40 -
 js/examples/read_file.html                         |    91 -
 js/gulp/argv.js                                    |    39 -
 js/gulp/arrow-task.js                              |    64 -
 js/gulp/clean-task.js                              |    30 -
 js/gulp/closure-task.js                            |   215 -
 js/gulp/compile-task.js                            |    37 -
 js/gulp/memoize-task.js                            |    38 -
 js/gulp/minify-task.js                             |    90 -
 js/gulp/package-task.js                            |    98 -
 js/gulp/test-task.js                               |   176 -
 js/gulp/typescript-task.js                         |    69 -
 js/gulp/util.js                                    |   218 -
 js/gulpfile.js                                     |   102 -
 js/index.js                                        |    18 -
 js/index.mjs                                       |    18 -
 js/index.ts                                        |    18 -
 js/jest.config.js                                  |    56 -
 js/jest.coverage.config.js                         |    30 -
 js/lerna.json                                      |    10 -
 js/npm-release.sh                                  |    26 -
 js/package.json                                    |   111 -
 js/perf/config.js                                  |    30 -
 js/perf/index.js                                   |   248 -
 js/perf/table_config.js                            |    48 -
 js/src/Arrow.dom.ts                                |   112 -
 js/src/Arrow.node.ts                               |    32 -
 js/src/Arrow.ts                                    |   134 -
 js/src/bin/arrow2csv.ts                            |   334 -
 js/src/builder.ts                                  |   527 -
 js/src/builder/binary.ts                           |    54 -
 js/src/builder/bool.ts                             |    31 -
 js/src/builder/buffer.ts                           |   182 -
 js/src/builder/date.ts                             |    26 -
 js/src/builder/decimal.ts                          |    22 -
 js/src/builder/dictionary.ts                       |    98 -
 js/src/builder/fixedsizebinary.ts                  |    22 -
 js/src/builder/fixedsizelist.ts                    |    41 -
 js/src/builder/float.ts                            |    45 -
 js/src/builder/index.ts                            |    82 -
 js/src/builder/int.ts                              |    80 -
 js/src/builder/interval.ts                         |    26 -
 js/src/builder/list.ts                             |    58 -
 js/src/builder/map.ts                              |    64 -
 js/src/builder/null.ts                             |    29 -
 js/src/builder/run.ts                              |    34 -
 js/src/builder/struct.ts                           |    29 -
 js/src/builder/time.ts                             |    30 -
 js/src/builder/timestamp.ts                        |    30 -
 js/src/builder/union.ts                            |    96 -
 js/src/builder/utf8.ts                             |    44 -
 js/src/builder/valid.ts                            |    77 -
 js/src/column.ts                                   |   136 -
 js/src/compute/dataframe.ts                        |   283 -
 js/src/compute/predicate.ts                        |   292 -
 js/src/data.ts                                     |   295 -
 js/src/enum.ts                                     |   142 -
 js/src/fb/.eslintrc.js                             |    23 -
 js/src/fb/File.ts                                  |   300 -
 js/src/fb/Message.ts                               |   709 -
 js/src/fb/Schema.ts                                |  2658 --
 js/src/interfaces.ts                               |   403 -
 js/src/io/adapters.ts                              |   398 -
 js/src/io/file.ts                                  |   115 -
 js/src/io/interfaces.ts                            |   179 -
 js/src/io/node/builder.ts                          |    98 -
 js/src/io/node/iterable.ts                         |   113 -
 js/src/io/node/reader.ts                           |    86 -
 js/src/io/node/writer.ts                           |    77 -
 js/src/io/stream.ts                                |   152 -
 js/src/io/whatwg/builder.ts                        |   116 -
 js/src/io/whatwg/iterable.ts                       |    93 -
 js/src/io/whatwg/reader.ts                         |    52 -
 js/src/io/whatwg/writer.ts                         |    50 -
 js/src/ipc/message.ts                              |   257 -
 js/src/ipc/metadata/file.ts                        |   163 -
 js/src/ipc/metadata/json.ts                        |   206 -
 js/src/ipc/metadata/message.ts                     |   621 -
 js/src/ipc/reader.ts                               |   739 -
 js/src/ipc/writer.ts                               |   492 -
 js/src/recordbatch.ts                              |   151 -
 js/src/schema.ts                                   |   154 -
 js/src/table.ts                                    |   294 -
 js/src/type.ts                                     |   613 -
 js/src/util/args.ts                                |   153 -
 js/src/util/bit.ts                                 |   161 -
 js/src/util/bn.ts                                  |   231 -
 js/src/util/buffer.ts                              |   235 -
 js/src/util/compat.ts                              |   178 -
 js/src/util/fn.ts                                  |    31 -
 js/src/util/int.ts                                 |   440 -
 js/src/util/math.ts                                |   105 -
 js/src/util/pretty.ts                              |    37 -
 js/src/util/recordbatch.ts                         |   121 -
 js/src/util/utf8.ts                                |    48 -
 js/src/util/vector.ts                              |   198 -
 js/src/vector.ts                                   |    73 -
 js/src/vector/base.ts                              |   111 -
 js/src/vector/binary.ts                            |    27 -
 js/src/vector/bool.ts                              |    35 -
 js/src/vector/chunked.ts                           |   320 -
 js/src/vector/date.ts                              |    51 -
 js/src/vector/decimal.ts                           |    22 -
 js/src/vector/dictionary.ts                        |    60 -
 js/src/vector/fixedsizebinary.ts                   |    22 -
 js/src/vector/fixedsizelist.ts                     |    22 -
 js/src/vector/float.ts                             |   144 -
 js/src/vector/index.ts                             |   207 -
 js/src/vector/int.ts                               |   195 -
 js/src/vector/interval.ts                          |    26 -
 js/src/vector/list.ts                              |    22 -
 js/src/vector/map.ts                               |    35 -
 js/src/vector/null.ts                              |    22 -
 js/src/vector/row.ts                               |   296 -
 js/src/vector/struct.ts                            |    32 -
 js/src/vector/time.ts                              |    30 -
 js/src/vector/timestamp.ts                         |    30 -
 js/src/vector/union.ts                             |    32 -
 js/src/vector/utf8.ts                              |    39 -
 js/src/visitor.ts                                  |   260 -
 js/src/visitor/builderctor.ts                      |    98 -
 js/src/visitor/bytewidth.ts                        |    68 -
 js/src/visitor/get.ts                              |   321 -
 js/src/visitor/indexof.ts                          |   183 -
 js/src/visitor/iterator.ts                         |   193 -
 js/src/visitor/jsontypeassembler.ts                |    91 -
 js/src/visitor/jsonvectorassembler.ts              |   177 -
 js/src/visitor/set.ts                              |   354 -
 js/src/visitor/toarray.ts                          |   151 -
 js/src/visitor/typeassembler.ts                    |   158 -
 js/src/visitor/typecomparator.ts                   |   280 -
 js/src/visitor/typector.ts                         |    82 -
 js/src/visitor/vectorassembler.ts                  |   234 -
 js/src/visitor/vectorctor.ts                       |    99 -
 js/src/visitor/vectorloader.ts                     |   141 -
 js/test/.eslintrc.js                               |    31 -
 js/test/Arrow.ts                                   |    62 -
 js/test/data/tables.ts                             |    84 -
 js/test/data/tables/generate.py                    |    50 -
 js/test/generate-test-data.ts                      |   723 -
 js/test/inference/column.ts                        |    62 -
 js/test/inference/nested.ts                        |    62 -
 js/test/inference/visitor/get.ts                   |    56 -
 js/test/jest-extensions.ts                         |   162 -
 js/test/tsconfig.coverage.json                     |     6 -
 js/test/tsconfig.json                              |    18 -
 js/test/unit/bit-tests.ts                          |    41 -
 js/test/unit/builders/builder-tests.ts             |   269 -
 js/test/unit/builders/date-tests.ts                |   106 -
 js/test/unit/builders/dictionary-tests.ts          |    65 -
 js/test/unit/builders/int64-tests.ts               |    91 -
 js/test/unit/builders/primitive-tests.ts           |   154 -
 js/test/unit/builders/uint64-tests.ts              |    91 -
 js/test/unit/builders/utf8-tests.ts                |    62 -
 js/test/unit/builders/utils.ts                     |   219 -
 js/test/unit/generated-data-tests.ts               |    61 -
 js/test/unit/generated-data-validators.ts          |   184 -
 js/test/unit/int-tests.ts                          |   241 -
 js/test/unit/ipc/helpers.ts                        |   203 -
 js/test/unit/ipc/message-reader-tests.ts           |   109 -
 js/test/unit/ipc/reader/file-reader-tests.ts       |   123 -
 js/test/unit/ipc/reader/from-inference-tests.ts    |   152 -
 js/test/unit/ipc/reader/json-reader-tests.ts       |    42 -
 js/test/unit/ipc/reader/stream-reader-tests.ts     |    65 -
 js/test/unit/ipc/reader/streams-dom-tests.ts       |   224 -
 js/test/unit/ipc/reader/streams-node-tests.ts      |   220 -
 js/test/unit/ipc/validate.ts                       |    74 -
 js/test/unit/ipc/writer/file-writer-tests.ts       |    46 -
 js/test/unit/ipc/writer/json-writer-tests.ts       |    48 -
 js/test/unit/ipc/writer/stream-writer-tests.ts     |   119 -
 js/test/unit/ipc/writer/streams-dom-tests.ts       |   273 -
 js/test/unit/ipc/writer/streams-node-tests.ts      |   276 -
 js/test/unit/math-tests.ts                         |    47 -
 js/test/unit/recordbatch/record-batch-tests.ts     |   130 -
 js/test/unit/table-tests.ts                        |   624 -
 js/test/unit/table/assign-tests.ts                 |    80 -
 js/test/unit/table/serialize-tests.ts              |   167 -
 js/test/unit/utils.ts                              |    21 -
 js/test/unit/vector/bool-vector-tests.ts           |   111 -
 js/test/unit/vector/date-vector-tests.ts           |   102 -
 js/test/unit/vector/numeric-vector-tests.ts        |   608 -
 js/test/unit/vector/vector-tests.ts                |   127 -
 js/test/unit/visitor-tests.ts                      |   169 -
 js/tsconfig.json                                   |    13 -
 js/tsconfig/tsconfig.base.json                     |    45 -
 js/tsconfig/tsconfig.bin.cjs.json                  |    11 -
 js/tsconfig/tsconfig.es2015.cjs.json               |     8 -
 js/tsconfig/tsconfig.es2015.cls.json               |    11 -
 js/tsconfig/tsconfig.es2015.esm.json               |     8 -
 js/tsconfig/tsconfig.es5.cjs.json                  |     8 -
 js/tsconfig/tsconfig.es5.cls.json                  |    11 -
 js/tsconfig/tsconfig.es5.esm.json                  |     8 -
 js/tsconfig/tsconfig.esnext.cjs.json               |     8 -
 js/tsconfig/tsconfig.esnext.cls.json               |    11 -
 js/tsconfig/tsconfig.esnext.esm.json               |     8 -
 js/typedoc.js                                      |    30 -
 js/yarn.lock                                       | 10958 ------
 julia/Arrow/.gitignore                             |     6 -
 julia/Arrow/LICENSE.md                             |    15 -
 julia/Arrow/Project.toml                           |    37 -
 julia/Arrow/README.md                              |    55 -
 julia/Arrow/docs/.gitignore                        |     2 -
 julia/Arrow/docs/Manifest.toml                     |   204 -
 julia/Arrow/docs/Project.toml                      |     3 -
 julia/Arrow/docs/make.jl                           |    24 -
 julia/Arrow/docs/src/index.md                      |    10 -
 julia/Arrow/docs/src/manual.md                     |   150 -
 julia/Arrow/docs/src/reference.md                  |     6 -
 julia/Arrow/src/Arrow.jl                           |   107 -
 julia/Arrow/src/FlatBuffers/FlatBuffers.jl         |   153 -
 julia/Arrow/src/FlatBuffers/builder.jl             |   440 -
 julia/Arrow/src/FlatBuffers/table.jl               |   170 -
 julia/Arrow/src/arraytypes/arraytypes.jl           |   190 -
 julia/Arrow/src/arraytypes/bool.jl                 |   111 -
 julia/Arrow/src/arraytypes/compressed.jl           |    90 -
 julia/Arrow/src/arraytypes/dictencoding.jl         |   248 -
 julia/Arrow/src/arraytypes/fixedsizelist.jl        |   153 -
 julia/Arrow/src/arraytypes/list.jl                 |   209 -
 julia/Arrow/src/arraytypes/map.jl                  |   115 -
 julia/Arrow/src/arraytypes/primitive.jl            |   106 -
 julia/Arrow/src/arraytypes/struct.jl               |   130 -
 julia/Arrow/src/arraytypes/unions.jl               |   279 -
 julia/Arrow/src/arrowtypes.jl                      |   166 -
 julia/Arrow/src/eltypes.jl                         |   415 -
 julia/Arrow/src/metadata/File.jl                   |    90 -
 julia/Arrow/src/metadata/Flatbuf.jl                |    25 -
 julia/Arrow/src/metadata/Message.jl                |   202 -
 julia/Arrow/src/metadata/Schema.jl                 |   610 -
 julia/Arrow/src/table.jl                           |   556 -
 julia/Arrow/src/utils.jl                           |   200 -
 julia/Arrow/src/write.jl                           |   456 -
 julia/Arrow/test/arrowjson.jl                      |   611 -
 julia/Arrow/test/arrowjson/datetime.json           |   911 -
 julia/Arrow/test/arrowjson/decimal.json            | 32948 -------------------
 julia/Arrow/test/arrowjson/dictionary.json         |   422 -
 .../Arrow/test/arrowjson/dictionary_unsigned.json  |   323 -
 julia/Arrow/test/arrowjson/map.json                |   291 -
 julia/Arrow/test/arrowjson/nested.json             |   537 -
 julia/Arrow/test/arrowjson/primitive-empty.json    |   879 -
 julia/Arrow/test/arrowjson/primitive.json          |  1890 --
 .../Arrow/test/arrowjson/primitive_no_batches.json |   287 -
 julia/Arrow/test/dates.jl                          |    61 -
 julia/Arrow/test/integrationtest.jl                |    49 -
 julia/Arrow/test/pyarrow_roundtrip.jl              |    74 -
 julia/Arrow/test/runtests.jl                       |   218 -
 julia/Arrow/test/testtables.jl                     |   252 -
 matlab/.gitignore                                  |    23 -
 matlab/CMakeLists.txt                              |    60 -
 matlab/README.md                                   |   112 -
 matlab/build_support/common_vars.m                 |    24 -
 matlab/build_support/compile.m                     |    41 -
 matlab/build_support/test.m                        |    28 -
 .../matlab_interface_for_apache_arrow_design.md    |   366 -
 matlab/src/+mlarrow/+util/createMetadataStruct.m   |    24 -
 matlab/src/+mlarrow/+util/createVariableStruct.m   |    24 -
 .../+util/makeValidMATLABTableVariableNames.m      |    42 -
 matlab/src/+mlarrow/+util/table2mlarrow.m          |    83 -
 matlab/src/feather_reader.cc                       |   267 -
 matlab/src/feather_reader.h                        |    77 -
 matlab/src/feather_writer.cc                       |   338 -
 matlab/src/feather_writer.h                        |    73 -
 matlab/src/featherread.m                           |    90 -
 matlab/src/featherreadmex.cc                       |    37 -
 matlab/src/featherwrite.m                          |    44 -
 matlab/src/featherwritemex.cc                      |    37 -
 matlab/src/matlab_traits.h                         |   103 -
 matlab/src/util/handle_status.cc                   |    91 -
 matlab/src/util/handle_status.h                    |    32 -
 matlab/src/util/unicode_conversion.cc              |    63 -
 matlab/src/util/unicode_conversion.h               |    32 -
 matlab/test/tfeather.m                             |   232 -
 matlab/test/tfeathermex.m                          |    76 -
 matlab/test/util/createTable.m                     |    68 -
 .../test/util/createVariablesAndMetadataStructs.m  |    98 -
 matlab/test/util/featherMEXRoundTrip.m             |    22 -
 matlab/test/util/featherRoundTrip.m                |    22 -
 python/.coveragerc                                 |    19 -
 python/.flake8.cython                              |    20 -
 python/.gitignore                                  |    45 -
 python/CMakeLists.txt                              |   619 -
 python/MANIFEST.in                                 |    15 -
 python/README.md                                   |    59 -
 python/asv-build.sh                                |    75 -
 python/asv-install.sh                              |    21 -
 python/asv-uninstall.sh                            |    21 -
 python/asv.conf.json                               |   187 -
 python/benchmarks/__init__.py                      |    16 -
 python/benchmarks/array_ops.py                     |    34 -
 python/benchmarks/common.py                        |   349 -
 python/benchmarks/convert_builtins.py              |    87 -
 python/benchmarks/convert_pandas.py                |   121 -
 python/benchmarks/io.py                            |    89 -
 python/benchmarks/microbenchmarks.py               |    45 -
 python/benchmarks/parquet.py                       |   156 -
 python/benchmarks/plasma.py                        |    72 -
 python/benchmarks/streaming.py                     |    70 -
 python/cmake_modules                               |     1 -
 python/examples/flight/client.py                   |   189 -
 python/examples/flight/middleware.py               |   167 -
 python/examples/flight/server.py                   |   154 -
 python/examples/minimal_build/Dockerfile.fedora    |    31 -
 python/examples/minimal_build/Dockerfile.ubuntu    |    38 -
 python/examples/minimal_build/README.md            |    73 -
 python/examples/minimal_build/build_conda.sh       |   119 -
 python/examples/minimal_build/build_venv.sh        |    84 -
 python/examples/plasma/sorting/multimerge.pyx      |   102 -
 python/examples/plasma/sorting/setup.py            |    27 -
 python/examples/plasma/sorting/sort_df.py          |   203 -
 python/pyarrow/__init__.pxd                        |    42 -
 python/pyarrow/__init__.py                         |   504 -
 python/pyarrow/_compute.pxd                        |    27 -
 python/pyarrow/_compute.pyx                        |  1092 -
 python/pyarrow/_csv.pxd                            |    46 -
 python/pyarrow/_csv.pyx                            |   952 -
 python/pyarrow/_cuda.pxd                           |    67 -
 python/pyarrow/_cuda.pyx                           |  1059 -
 python/pyarrow/_dataset.pyx                        |  2977 --
 python/pyarrow/_flight.pyx                         |  2578 --
 python/pyarrow/_fs.pxd                             |    94 -
 python/pyarrow/_fs.pyx                             |  1088 -
 python/pyarrow/_hdfs.pyx                           |   141 -
 python/pyarrow/_json.pyx                           |   249 -
 python/pyarrow/_orc.pxd                            |    53 -
 python/pyarrow/_orc.pyx                            |   111 -
 python/pyarrow/_parquet.pxd                        |   553 -
 python/pyarrow/_parquet.pyx                        |  1435 -
 python/pyarrow/_plasma.pyx                         |   868 -
 python/pyarrow/_s3fs.pyx                           |   260 -
 python/pyarrow/array.pxi                           |  2387 --
 python/pyarrow/benchmark.pxi                       |    20 -
 python/pyarrow/benchmark.py                        |    21 -
 python/pyarrow/builder.pxi                         |    82 -
 python/pyarrow/cffi.py                             |    71 -
 python/pyarrow/compat.pxi                          |    65 -
 python/pyarrow/compat.py                           |    29 -
 python/pyarrow/compute.py                          |   493 -
 python/pyarrow/config.pxi                          |    74 -
 python/pyarrow/csv.py                              |    22 -
 python/pyarrow/cuda.py                             |    25 -
 python/pyarrow/dataset.py                          |   779 -
 python/pyarrow/error.pxi                           |   231 -
 python/pyarrow/feather.pxi                         |   105 -
 python/pyarrow/feather.py                          |   262 -
 python/pyarrow/filesystem.py                       |   511 -
 python/pyarrow/flight.py                           |    63 -
 python/pyarrow/fs.py                               |   326 -
 python/pyarrow/gandiva.pyx                         |   482 -
 python/pyarrow/hdfs.py                             |   240 -
 python/pyarrow/includes/__init__.pxd               |     0
 python/pyarrow/includes/common.pxd                 |   137 -
 python/pyarrow/includes/libarrow.pxd               |  2356 --
 python/pyarrow/includes/libarrow_cuda.pxd          |   107 -
 python/pyarrow/includes/libarrow_dataset.pxd       |   384 -
 python/pyarrow/includes/libarrow_flight.pxd        |   552 -
 python/pyarrow/includes/libarrow_fs.pxd            |   268 -
 python/pyarrow/includes/libgandiva.pxd             |   281 -
 python/pyarrow/includes/libplasma.pxd              |    25 -
 python/pyarrow/io-hdfs.pxi                         |   470 -
 python/pyarrow/io.pxi                              |  1896 --
 python/pyarrow/ipc.pxi                             |   968 -
 python/pyarrow/ipc.py                              |   233 -
 python/pyarrow/json.py                             |    19 -
 python/pyarrow/jvm.py                              |   335 -
 python/pyarrow/lib.pxd                             |   597 -
 python/pyarrow/lib.pyx                             |   158 -
 python/pyarrow/memory.pxi                          |   216 -
 python/pyarrow/orc.py                              |   149 -
 python/pyarrow/pandas-shim.pxi                     |   254 -
 python/pyarrow/pandas_compat.py                    |  1226 -
 python/pyarrow/parquet.py                          |  2076 --
 python/pyarrow/plasma.py                           |   152 -
 python/pyarrow/public-api.pxi                      |   418 -
 python/pyarrow/scalar.pxi                          |   927 -
 python/pyarrow/serialization.pxi                   |   556 -
 python/pyarrow/serialization.py                    |   504 -
 python/pyarrow/table.pxi                           |  2266 --
 python/pyarrow/tensor.pxi                          |   892 -
 python/pyarrow/tensorflow/plasma_op.cc             |   391 -
 python/pyarrow/tests/__init__.py                   |     0
 python/pyarrow/tests/arrow_7980.py                 |    30 -
 python/pyarrow/tests/conftest.py                   |   277 -
 .../v0.17.0.version=2-compression=lz4.feather      |   Bin 594 -> 0 bytes
 python/pyarrow/tests/data/orc/README.md            |    22 -
 .../tests/data/orc/TestOrcFile.emptyFile.jsn.gz    |   Bin 50 -> 0 bytes
 .../tests/data/orc/TestOrcFile.emptyFile.orc       |   Bin 523 -> 0 bytes
 .../tests/data/orc/TestOrcFile.test1.jsn.gz        |   Bin 323 -> 0 bytes
 .../pyarrow/tests/data/orc/TestOrcFile.test1.orc   |   Bin 1711 -> 0 bytes
 .../tests/data/orc/TestOrcFile.testDate1900.jsn.gz |   Bin 182453 -> 0 bytes
 .../tests/data/orc/TestOrcFile.testDate1900.orc    |   Bin 30941 -> 0 bytes
 python/pyarrow/tests/data/orc/decimal.jsn.gz       |   Bin 19313 -> 0 bytes
 python/pyarrow/tests/data/orc/decimal.orc          |   Bin 16337 -> 0 bytes
 .../data/parquet/v0.7.1.all-named-index.parquet    |   Bin 3948 -> 0 bytes
 .../v0.7.1.column-metadata-handling.parquet        |   Bin 2012 -> 0 bytes
 python/pyarrow/tests/data/parquet/v0.7.1.parquet   |   Bin 4372 -> 0 bytes
 .../data/parquet/v0.7.1.some-named-index.parquet   |   Bin 4008 -> 0 bytes
 python/pyarrow/tests/deserialize_buffer.py         |    26 -
 python/pyarrow/tests/pandas_examples.py            |   172 -
 python/pyarrow/tests/pandas_threaded_import.py     |    44 -
 python/pyarrow/tests/parquet/common.py             |   177 -
 python/pyarrow/tests/parquet/conftest.py           |    87 -
 python/pyarrow/tests/parquet/test_basic.py         |   586 -
 .../tests/parquet/test_compliant_nested_type.py    |   113 -
 python/pyarrow/tests/parquet/test_data_types.py    |   524 -
 python/pyarrow/tests/parquet/test_dataset.py       |  1588 -
 python/pyarrow/tests/parquet/test_datetime.py      |   373 -
 python/pyarrow/tests/parquet/test_metadata.py      |   477 -
 python/pyarrow/tests/parquet/test_pandas.py        |   687 -
 python/pyarrow/tests/parquet/test_parquet_file.py  |   258 -
 .../pyarrow/tests/parquet/test_parquet_writer.py   |   275 -
 python/pyarrow/tests/pyarrow_cython_example.pyx    |    55 -
 python/pyarrow/tests/strategies.py                 |   414 -
 python/pyarrow/tests/test_adhoc_memory_leak.py     |    43 -
 python/pyarrow/tests/test_array.py                 |  2680 --
 python/pyarrow/tests/test_builder.py               |    67 -
 python/pyarrow/tests/test_cffi.py                  |   295 -
 python/pyarrow/tests/test_compute.py               |  1243 -
 python/pyarrow/tests/test_convert_builtin.py       |  2156 --
 python/pyarrow/tests/test_csv.py                   |  1345 -
 python/pyarrow/tests/test_cuda.py                  |   792 -
 python/pyarrow/tests/test_cuda_numba_interop.py    |   235 -
 python/pyarrow/tests/test_cython.py                |   143 -
 python/pyarrow/tests/test_dataset.py               |  3158 --
 python/pyarrow/tests/test_deprecations.py          |    23 -
 python/pyarrow/tests/test_extension_type.py        |   668 -
 python/pyarrow/tests/test_feather.py               |   792 -
 python/pyarrow/tests/test_filesystem.py            |    67 -
 python/pyarrow/tests/test_flight.py                |  1808 -
 python/pyarrow/tests/test_fs.py                    |  1521 -
 python/pyarrow/tests/test_gandiva.py               |   365 -
 python/pyarrow/tests/test_hdfs.py                  |   442 -
 python/pyarrow/tests/test_io.py                    |  1754 -
 python/pyarrow/tests/test_ipc.py                   |   962 -
 python/pyarrow/tests/test_json.py                  |   310 -
 python/pyarrow/tests/test_jvm.py                   |   433 -
 python/pyarrow/tests/test_memory.py                |   156 -
 python/pyarrow/tests/test_misc.py                  |   175 -
 python/pyarrow/tests/test_orc.py                   |   165 -
 python/pyarrow/tests/test_pandas.py                |  4383 ---
 python/pyarrow/tests/test_plasma.py                |  1073 -
 python/pyarrow/tests/test_plasma_tf_op.py          |   104 -
 python/pyarrow/tests/test_scalars.py               |   625 -
 python/pyarrow/tests/test_schema.py                |   721 -
 python/pyarrow/tests/test_serialization.py         |  1233 -
 .../pyarrow/tests/test_serialization_deprecated.py |    56 -
 python/pyarrow/tests/test_sparse_tensor.py         |   491 -
 python/pyarrow/tests/test_strategies.py            |    61 -
 python/pyarrow/tests/test_table.py                 |  1687 -
 python/pyarrow/tests/test_tensor.py                |   215 -
 python/pyarrow/tests/test_types.py                 |  1041 -
 python/pyarrow/tests/util.py                       |   231 -
 python/pyarrow/types.pxi                           |  2781 --
 python/pyarrow/types.py                            |   357 -
 python/pyarrow/util.py                             |   152 -
 python/pyarrow/vendored/__init__.py                |    16 -
 python/pyarrow/vendored/version.py                 |   545 -
 python/pyproject.toml                              |    26 -
 python/requirements-build.txt                      |     4 -
 python/requirements-test.txt                       |     7 -
 python/requirements-wheel-build.txt                |     6 -
 python/requirements-wheel-test.txt                 |    11 -
 python/scripts/test_imports.py                     |    21 -
 python/scripts/test_leak.py                        |   110 -
 python/setup.cfg                                   |    34 -
 python/setup.py                                    |   628 -
 r/.Rbuildignore                                    |    26 -
 r/.gitignore                                       |    20 -
 r/DESCRIPTION                                      |   102 -
 r/Makefile                                         |    53 -
 r/NAMESPACE                                        |   346 -
 r/NEWS.md                                          |   358 -
 r/R/array-data.R                                   |    53 -
 r/R/array.R                                        |   301 -
 r/R/arrow-datum.R                                  |   165 -
 r/R/arrow-package.R                                |   278 -
 r/R/arrow-tabular.R                                |   220 -
 r/R/arrowExports.R                                 |  1668 -
 r/R/buffer.R                                       |    72 -
 r/R/chunked-array.R                                |   132 -
 r/R/compression.R                                  |   121 -
 r/R/compute.R                                      |   257 -
 r/R/config.R                                       |    30 -
 r/R/csv.R                                          |   587 -
 r/R/dataset-factory.R                              |   169 -
 r/R/dataset-format.R                               |   320 -
 r/R/dataset-partition.R                            |   125 -
 r/R/dataset-scan.R                                 |   202 -
 r/R/dataset-write.R                                |    99 -
 r/R/dataset.R                                      |   320 -
 r/R/deprecated.R                                   |    40 -
 r/R/dictionary.R                                   |    69 -
 r/R/dplyr.R                                        |  1101 -
 r/R/enums.R                                        |   142 -
 r/R/expression.R                                   |   346 -
 r/R/feather.R                                      |   221 -
 r/R/field.R                                        |    82 -
 r/R/filesystem.R                                   |   510 -
 r/R/flight.R                                       |   121 -
 r/R/install-arrow.R                                |   139 -
 r/R/io.R                                           |   290 -
 r/R/ipc_stream.R                                   |   113 -
 r/R/json.R                                         |   104 -
 r/R/memory-pool.R                                  |    61 -
 r/R/message.R                                      |    95 -
 r/R/metadata.R                                     |   132 -
 r/R/parquet.R                                      |   575 -
 r/R/python.R                                       |   159 -
 r/R/record-batch-reader.R                          |   155 -
 r/R/record-batch-writer.R                          |   196 -
 r/R/record-batch.R                                 |   197 -
 r/R/reexports-bit64.R                              |    22 -
 r/R/reexports-tidyselect.R                         |    43 -
 r/R/scalar.R                                       |    79 -
 r/R/schema.R                                       |   302 -
 r/R/table.R                                        |   179 -
 r/R/type.R                                         |   484 -
 r/R/util.R                                         |   112 -
 r/README.md                                        |   306 -
 r/_pkgdown.yml                                     |   177 -
 r/arrow.Rproj                                      |    21 -
 r/cleanup                                          |    21 -
 r/configure                                        |   277 -
 r/configure.win                                    |    73 -
 r/cran-comments.md                                 |    10 -
 r/data-raw/codegen.R                               |   254 -
 r/extra-tests/helpers.R                            |    36 -
 r/extra-tests/test-read-files.R                    |   197 -
 r/extra-tests/write-files.R                        |    42 -
 r/inst/NOTICE.txt                                  |    84 -
 r/inst/build_arrow_static.sh                       |    79 -
 r/inst/demo_flight_server.py                       |   120 -
 r/inst/v0.7.1.parquet                              |   Bin 4372 -> 0 bytes
 r/lint.sh                                          |    41 -
 r/man/ArrayData.Rd                                 |    27 -
 r/man/ChunkedArray.Rd                              |    58 -
 r/man/Codec.Rd                                     |    24 -
 r/man/CsvReadOptions.Rd                            |   100 -
 r/man/CsvTableReader.Rd                            |    32 -
 r/man/DataType.Rd                                  |    15 -
 r/man/Dataset.Rd                                   |    81 -
 r/man/DictionaryType.Rd                            |    15 -
 r/man/Expression.Rd                                |    18 -
 r/man/FeatherReader.Rd                             |    33 -
 r/man/Field.Rd                                     |    35 -
 r/man/FileFormat.Rd                                |    56 -
 r/man/FileInfo.Rd                                  |    28 -
 r/man/FileSelector.Rd                              |    27 -
 r/man/FileSystem.Rd                                |    99 -
 r/man/FileWriteOptions.Rd                          |     8 -
 r/man/FixedWidthType.Rd                            |    15 -
 r/man/FragmentScanOptions.Rd                       |    29 -
 r/man/InputStream.Rd                               |    45 -
 r/man/MemoryPool.Rd                                |    24 -
 r/man/Message.Rd                                   |    15 -
 r/man/MessageReader.Rd                             |    15 -
 r/man/OutputStream.Rd                              |    38 -
 r/man/ParquetArrowReaderProperties.Rd              |    29 -
 r/man/ParquetFileReader.Rd                         |    58 -
 r/man/ParquetFileWriter.Rd                         |    31 -
 r/man/ParquetWriterProperties.Rd                   |    49 -
 r/man/Partitioning.Rd                              |    51 -
 r/man/RecordBatch.Rd                               |    92 -
 r/man/RecordBatchReader.Rd                         |    86 -
 r/man/RecordBatchWriter.Rd                         |    89 -
 r/man/Scalar.Rd                                    |     9 -
 r/man/Scanner.Rd                                   |    47 -
 r/man/Schema.Rd                                    |    85 -
 r/man/Table.Rd                                     |    92 -
 r/man/array.Rd                                     |    84 -
 r/man/arrow-package.Rd                             |    41 -
 r/man/arrow_available.Rd                           |    42 -
 r/man/arrow_info.Rd                                |    17 -
 r/man/buffer.Rd                                    |    35 -
 r/man/call_function.Rd                             |    46 -
 r/man/cast_options.Rd                              |    22 -
 r/man/codec_is_available.Rd                        |    20 -
 r/man/compression.Rd                               |    31 -
 r/man/copy_files.Rd                                |    35 -
 r/man/cpu_count.Rd                                 |    17 -
 r/man/data-type.Rd                                 |   163 -
 r/man/dataset_factory.Rd                           |    76 -
 r/man/default_memory_pool.Rd                       |    15 -
 r/man/dictionary.Rd                                |    24 -
 r/man/enums.Rd                                     |    73 -
 r/man/flight_connect.Rd                            |    21 -
 r/man/flight_get.Rd                                |    19 -
 r/man/flight_put.Rd                                |    25 -
 r/man/hive_partition.Rd                            |    32 -
 r/man/install_arrow.Rd                             |    61 -
 r/man/install_pyarrow.Rd                           |    22 -
 r/man/list_compute_functions.Rd                    |    39 -
 r/man/list_flights.Rd                              |    23 -
 r/man/load_flight_server.Rd                        |    17 -
 r/man/make_readable_file.Rd                        |    29 -
 r/man/map_batches.Rd                               |    30 -
 r/man/match_arrow.Rd                               |    28 -
 r/man/mmap_create.Rd                               |    19 -
 r/man/mmap_open.Rd                                 |    16 -
 r/man/open_dataset.Rd                              |    79 -
 r/man/read_delim_arrow.Rd                          |   218 -
 r/man/read_feather.Rd                              |    50 -
 r/man/read_ipc_stream.Rd                           |    42 -
 r/man/read_json_arrow.Rd                           |    52 -
 r/man/read_message.Rd                              |    14 -
 r/man/read_parquet.Rd                              |    50 -
 r/man/read_schema.Rd                               |    19 -
 r/man/reexports.Rd                                 |    28 -
 r/man/s3_bucket.Rd                                 |    28 -
 r/man/type.Rd                                      |    17 -
 r/man/unify_schemas.Rd                             |    26 -
 r/man/value_counts.Rd                              |    18 -
 r/man/write_dataset.Rd                             |    66 -
 r/man/write_feather.Rd                             |    61 -
 r/man/write_ipc_stream.Rd                          |    38 -
 r/man/write_parquet.Rd                             |   108 -
 r/man/write_to_raw.Rd                              |    22 -
 r/pkgdown/extra.js                                 |    65 -
 r/src/.clang-format                                |    20 -
 r/src/.gitignore                                   |     3 -
 r/src/Makevars.in                                  |    29 -
 r/src/array.cpp                                    |   286 -
 r/src/array_to_vector.cpp                          |  1330 -
 r/src/arraydata.cpp                                |    49 -
 r/src/arrowExports.cpp                             |  7032 ----
 r/src/arrow_cpp11.h                                |   377 -
 r/src/arrow_types.h                                |   237 -
 r/src/arrow_vctrs.h                                |    22 -
 r/src/buffer.cpp                                   |    71 -
 r/src/chunkedarray.cpp                             |   139 -
 r/src/compression.cpp                              |    56 -
 r/src/compute.cpp                                  |   301 -
 r/src/csv.cpp                                      |   177 -
 r/src/dataset.cpp                                  |   486 -
 r/src/datatype.cpp                                 |   426 -
 r/src/expression.cpp                               |    71 -
 r/src/feather.cpp                                  |    87 -
 r/src/field.cpp                                    |    56 -
 r/src/filesystem.cpp                               |   329 -
 r/src/imports.cpp                                  |    43 -
 r/src/io.cpp                                       |   181 -
 r/src/json.cpp                                     |    67 -
 r/src/memorypool.cpp                               |    92 -
 r/src/message.cpp                                  |   105 -
 r/src/nameof.h                                     |    93 -
 r/src/parquet.cpp                                  |   326 -
 r/src/py-to-r.cpp                                  |    81 -
 r/src/r_to_arrow.cpp                               |  1046 -
 r/src/recordbatch.cpp                              |   326 -
 r/src/recordbatchreader.cpp                        |   130 -
 r/src/recordbatchwriter.cpp                        |    67 -
 r/src/runtimeinfo.cpp                              |    30 -
 r/src/scalar.cpp                                   |    97 -
 r/src/schema.cpp                                   |   146 -
 r/src/symbols.cpp                                  |    86 -
 r/src/table.cpp                                    |   350 -
 r/src/threadpool.cpp                               |    51 -
 r/src/type_infer.cpp                               |   202 -
 r/tests/testthat.R                                 |    27 -
 .../data-arrow-extra-meta_3.0.0.parquet            |   Bin 7862 -> 0 bytes
 .../golden-files/data-arrow_0.17.0_lz4.feather     |   Bin 1650 -> 0 bytes
 .../data-arrow_0.17.0_uncompressed.feather         |   Bin 1354 -> 0 bytes
 .../golden-files/data-arrow_0.17.0_zstd.feather    |   Bin 1626 -> 0 bytes
 .../testthat/golden-files/data-arrow_1.0.1.parquet |   Bin 3603 -> 0 bytes
 .../golden-files/data-arrow_1.0.1_lz4.feather      |   Bin 2858 -> 0 bytes
 .../data-arrow_1.0.1_uncompressed.feather          |   Bin 2626 -> 0 bytes
 .../golden-files/data-arrow_1.0.1_zstd.feather     |   Bin 2842 -> 0 bytes
 .../testthat/golden-files/data-arrow_2.0.0.parquet |   Bin 3965 -> 0 bytes
 .../golden-files/data-arrow_2.0.0_lz4.feather      |   Bin 3162 -> 0 bytes
 .../data-arrow_2.0.0_uncompressed.feather          |   Bin 2930 -> 0 bytes
 .../golden-files/data-arrow_2.0.0_zstd.feather     |   Bin 3146 -> 0 bytes
 r/tests/testthat/helper-arrow.R                    |    69 -
 r/tests/testthat/helper-data.R                     |   169 -
 r/tests/testthat/helper-expectation.R              |   191 -
 r/tests/testthat/helper-parquet.R                  |    29 -
 r/tests/testthat/helper-roundtrip.R                |    43 -
 r/tests/testthat/helper-skip.R                     |    55 -
 r/tests/testthat/latin1.R                          |    76 -
 r/tests/testthat/test-Array.R                      |   820 -
 r/tests/testthat/test-RecordBatch.R                |   501 -
 r/tests/testthat/test-Table.R                      |   477 -
 r/tests/testthat/test-array-data.R                 |    35 -
 r/tests/testthat/test-arrow-info.R                 |    23 -
 r/tests/testthat/test-arrow.R                      |    74 -
 r/tests/testthat/test-backwards-compatibility.R    |   117 -
 r/tests/testthat/test-buffer-reader.R              |    40 -
 r/tests/testthat/test-buffer.R                     |    99 -
 r/tests/testthat/test-chunked-array.R              |   413 -
 r/tests/testthat/test-chunked-array.txt            |   103 -
 r/tests/testthat/test-compressed.R                 |    75 -
 r/tests/testthat/test-compute-aggregate.R          |   353 -
 r/tests/testthat/test-compute-arith.R              |   116 -
 r/tests/testthat/test-compute-sort.R               |   165 -
 r/tests/testthat/test-compute-vector.R             |   133 -
 r/tests/testthat/test-csv.R                        |   258 -
 r/tests/testthat/test-data-type.R                  |   413 -
 r/tests/testthat/test-dataset.R                    |  1786 -
 r/tests/testthat/test-dplyr-arrange.R              |   211 -
 r/tests/testthat/test-dplyr-filter.R               |   417 -
 r/tests/testthat/test-dplyr-group-by.R             |   135 -
 r/tests/testthat/test-dplyr-mutate.R               |   417 -
 r/tests/testthat/test-dplyr-string-functions.R     |   347 -
 r/tests/testthat/test-dplyr.R                      |   591 -
 r/tests/testthat/test-expression.R                 |    99 -
 r/tests/testthat/test-feather.R                    |   242 -
 r/tests/testthat/test-field.R                      |    38 -
 r/tests/testthat/test-filesystem.R                 |   175 -
 r/tests/testthat/test-install-arrow.R              |    39 -
 r/tests/testthat/test-json.R                       |   250 -
 r/tests/testthat/test-memory-pool.R                |    26 -
 r/tests/testthat/test-message-reader.R             |    86 -
 r/tests/testthat/test-message.R                    |    64 -
 r/tests/testthat/test-metadata.R                   |   207 -
 r/tests/testthat/test-parquet.R                    |   254 -
 r/tests/testthat/test-python-flight.R              |    63 -
 r/tests/testthat/test-python.R                     |   102 -
 r/tests/testthat/test-read-record-batch.R          |    79 -
 r/tests/testthat/test-read-write.R                 |   126 -
 r/tests/testthat/test-record-batch-reader.R        |   142 -
 r/tests/testthat/test-s3-minio.R                   |   229 -
 r/tests/testthat/test-s3.R                         |    53 -
 r/tests/testthat/test-scalar.R                     |   104 -
 r/tests/testthat/test-schema.R                     |   176 -
 r/tests/testthat/test-thread-pool.R                |    26 -
 r/tests/testthat/test-type.R                       |   108 -
 r/tests/testthat/test-utf.R                        |    25 -
 r/tools/autobrew                                   |    66 -
 r/tools/nixlibs.R                                  |   506 -
 r/tools/ubsan.supp                                 |    18 -
 r/tools/winlibs.R                                  |    65 -
 r/vignettes/arrow.Rmd                              |   206 -
 r/vignettes/dataset.Rmd                            |   397 -
 r/vignettes/developing.Rmd                         |   520 -
 r/vignettes/flight.Rmd                             |    78 -
 r/vignettes/fs.Rmd                                 |   130 -
 r/vignettes/install.Rmd                            |   366 -
 r/vignettes/python.Rmd                             |   131 -
 ruby/Gemfile                                       |    22 -
 ruby/README.md                                     |    36 -
 ruby/Rakefile                                      |    56 -
 ruby/red-arrow-cuda/.gitignore                     |    18 -
 ruby/red-arrow-cuda/Gemfile                        |    24 -
 ruby/red-arrow-cuda/LICENSE.txt                    |   202 -
 ruby/red-arrow-cuda/NOTICE.txt                     |     2 -
 ruby/red-arrow-cuda/README.md                      |    60 -
 ruby/red-arrow-cuda/Rakefile                       |    41 -
 ruby/red-arrow-cuda/dependency-check/Rakefile      |    47 -
 ruby/red-arrow-cuda/lib/arrow-cuda.rb              |    29 -
 .../lib/arrow-cuda/device-manager.rb               |    25 -
 ruby/red-arrow-cuda/lib/arrow-cuda/loader.rb       |    35 -
 ruby/red-arrow-cuda/lib/arrow-cuda/version.rb      |    26 -
 ruby/red-arrow-cuda/red-arrow-cuda.gemspec         |    51 -
 ruby/red-arrow-cuda/test/helper.rb                 |    20 -
 ruby/red-arrow-cuda/test/run-test.rb               |    50 -
 ruby/red-arrow-cuda/test/test-cuda.rb              |    38 -
 ruby/red-arrow-dataset/.gitignore                  |    18 -
 ruby/red-arrow-dataset/Gemfile                     |    24 -
 ruby/red-arrow-dataset/LICENSE.txt                 |   202 -
 ruby/red-arrow-dataset/NOTICE.txt                  |     2 -
 ruby/red-arrow-dataset/README.md                   |    50 -
 ruby/red-arrow-dataset/Rakefile                    |    41 -
 ruby/red-arrow-dataset/dependency-check/Rakefile   |    47 -
 ruby/red-arrow-dataset/lib/arrow-dataset.rb        |    29 -
 .../lib/arrow-dataset/in-memory-fragment.rb        |    32 -
 .../lib/arrow-dataset/in-memory-scan-task.rb       |    35 -
 ruby/red-arrow-dataset/lib/arrow-dataset/loader.rb |    36 -
 .../lib/arrow-dataset/scan-options.rb              |    37 -
 .../red-arrow-dataset/lib/arrow-dataset/version.rb |    26 -
 ruby/red-arrow-dataset/red-arrow-dataset.gemspec   |    51 -
 ruby/red-arrow-dataset/test/helper.rb              |    20 -
 ruby/red-arrow-dataset/test/run-test.rb            |    50 -
 .../test/test-in-memory-scan-task.rb               |    33 -
 ruby/red-arrow-dataset/test/test-scan-options.rb   |    36 -
 ruby/red-arrow/.gitignore                          |    22 -
 ruby/red-arrow/.yardopts                           |     6 -
 ruby/red-arrow/Gemfile                             |    22 -
 ruby/red-arrow/LICENSE.txt                         |   202 -
 ruby/red-arrow/NOTICE.txt                          |     2 -
 ruby/red-arrow/README.md                           |    52 -
 ruby/red-arrow/Rakefile                            |   100 -
 ruby/red-arrow/benchmark/raw-records/boolean.yml   |    65 -
 .../red-arrow/benchmark/raw-records/decimal128.yml |    68 -
 .../red-arrow/benchmark/raw-records/dictionary.yml |    75 -
 ruby/red-arrow/benchmark/raw-records/int64.yml     |    67 -
 ruby/red-arrow/benchmark/raw-records/list.yml      |    70 -
 ruby/red-arrow/benchmark/raw-records/string.yml    |    65 -
 ruby/red-arrow/benchmark/raw-records/timestamp.yml |    75 -
 ruby/red-arrow/benchmark/values/boolean.yml        |    37 -
 ruby/red-arrow/benchmark/values/decimal128.yml     |    38 -
 ruby/red-arrow/benchmark/values/dictionary.yml     |    46 -
 ruby/red-arrow/benchmark/values/int64.yml          |    37 -
 ruby/red-arrow/benchmark/values/list.yml           |    44 -
 ruby/red-arrow/benchmark/values/string.yml         |    38 -
 ruby/red-arrow/benchmark/values/timestamp.yml      |    49 -
 ruby/red-arrow/doc/text/development.md             |    34 -
 ruby/red-arrow/example/read-file.rb                |    36 -
 ruby/red-arrow/example/read-stream.rb              |    36 -
 ruby/red-arrow/example/write-file.rb               |    63 -
 ruby/red-arrow/example/write-stream.rb             |    63 -
 ruby/red-arrow/ext/arrow/arrow.cpp                 |    81 -
 ruby/red-arrow/ext/arrow/converters.cpp            |    42 -
 ruby/red-arrow/ext/arrow/converters.hpp            |   669 -
 ruby/red-arrow/ext/arrow/extconf.rb                |    63 -
 ruby/red-arrow/ext/arrow/raw-records.cpp           |   183 -
 ruby/red-arrow/ext/arrow/red-arrow.hpp             |    95 -
 ruby/red-arrow/ext/arrow/values.cpp                |   156 -
 ruby/red-arrow/image/red-arrow.png                 |   Bin 7165 -> 0 bytes
 ruby/red-arrow/lib/arrow.rb                        |    30 -
 ruby/red-arrow/lib/arrow/array-builder.rb          |   209 -
 ruby/red-arrow/lib/arrow/array.rb                  |   222 -
 ruby/red-arrow/lib/arrow/bigdecimal-extension.rb   |    28 -
 ruby/red-arrow/lib/arrow/block-closable.rb         |    35 -
 ruby/red-arrow/lib/arrow/buffer.rb                 |    28 -
 ruby/red-arrow/lib/arrow/chunked-array.rb          |    91 -
 ruby/red-arrow/lib/arrow/column-containable.rb     |    48 -
 ruby/red-arrow/lib/arrow/column.rb                 |    76 -
 ruby/red-arrow/lib/arrow/compression-type.rb       |    37 -
 ruby/red-arrow/lib/arrow/csv-loader.rb             |   384 -
 ruby/red-arrow/lib/arrow/csv-read-options.rb       |    43 -
 ruby/red-arrow/lib/arrow/data-type.rb              |   198 -
 ruby/red-arrow/lib/arrow/date32-array-builder.rb   |    32 -
 ruby/red-arrow/lib/arrow/date32-array.rb           |    30 -
 ruby/red-arrow/lib/arrow/date64-array-builder.rb   |    33 -
 ruby/red-arrow/lib/arrow/date64-array.rb           |    29 -
 .../lib/arrow/decimal128-array-builder.rb          |    58 -
 ruby/red-arrow/lib/arrow/decimal128-array.rb       |    24 -
 ruby/red-arrow/lib/arrow/decimal128-data-type.rb   |    71 -
 ruby/red-arrow/lib/arrow/decimal128.rb             |    60 -
 .../lib/arrow/decimal256-array-builder.rb          |    61 -
 ruby/red-arrow/lib/arrow/decimal256-array.rb       |    25 -
 ruby/red-arrow/lib/arrow/decimal256-data-type.rb   |    73 -
 ruby/red-arrow/lib/arrow/decimal256.rb             |    60 -
 ruby/red-arrow/lib/arrow/dense-union-data-type.rb  |    90 -
 ruby/red-arrow/lib/arrow/dictionary-array.rb       |    24 -
 ruby/red-arrow/lib/arrow/dictionary-data-type.rb   |   117 -
 ruby/red-arrow/lib/arrow/field-containable.rb      |    38 -
 ruby/red-arrow/lib/arrow/field.rb                  |   118 -
 ruby/red-arrow/lib/arrow/file-output-stream.rb     |    34 -
 .../lib/arrow/fixed-size-binary-array-builder.rb   |    38 -
 .../red-arrow/lib/arrow/fixed-size-binary-array.rb |    26 -
 ruby/red-arrow/lib/arrow/generic-filterable.rb     |    43 -
 ruby/red-arrow/lib/arrow/generic-takeable.rb       |    38 -
 ruby/red-arrow/lib/arrow/group.rb                  |   172 -
 ruby/red-arrow/lib/arrow/list-array-builder.rb     |    96 -
 ruby/red-arrow/lib/arrow/list-data-type.rb         |   118 -
 ruby/red-arrow/lib/arrow/loader.rb                 |   172 -
 ruby/red-arrow/lib/arrow/null-array-builder.rb     |    26 -
 ruby/red-arrow/lib/arrow/null-array.rb             |    24 -
 ruby/red-arrow/lib/arrow/path-extension.rb         |    45 -
 ruby/red-arrow/lib/arrow/raw-table-converter.rb    |    47 -
 ruby/red-arrow/lib/arrow/record-batch-builder.rb   |   114 -
 .../lib/arrow/record-batch-file-reader.rb          |    28 -
 ruby/red-arrow/lib/arrow/record-batch-iterator.rb  |    22 -
 .../lib/arrow/record-batch-stream-reader.rb        |    30 -
 ruby/red-arrow/lib/arrow/record-batch.rb           |    77 -
 ruby/red-arrow/lib/arrow/record-containable.rb     |    38 -
 ruby/red-arrow/lib/arrow/record.rb                 |    60 -
 ruby/red-arrow/lib/arrow/rolling-window.rb         |    48 -
 ruby/red-arrow/lib/arrow/schema.rb                 |   100 -
 ruby/red-arrow/lib/arrow/slicer.rb                 |   454 -
 ruby/red-arrow/lib/arrow/sort-key.rb               |   193 -
 ruby/red-arrow/lib/arrow/sort-options.rb           |   109 -
 ruby/red-arrow/lib/arrow/sparse-union-data-type.rb |    90 -
 ruby/red-arrow/lib/arrow/struct-array-builder.rb   |   146 -
 ruby/red-arrow/lib/arrow/struct-array.rb           |    68 -
 ruby/red-arrow/lib/arrow/struct-data-type.rb       |   128 -
 ruby/red-arrow/lib/arrow/table-formatter.rb        |    66 -
 ruby/red-arrow/lib/arrow/table-list-formatter.rb   |    39 -
 ruby/red-arrow/lib/arrow/table-loader.rb           |   187 -
 ruby/red-arrow/lib/arrow/table-saver.rb            |   169 -
 ruby/red-arrow/lib/arrow/table-table-formatter.rb  |    73 -
 ruby/red-arrow/lib/arrow/table.rb                  |   525 -
 ruby/red-arrow/lib/arrow/tensor.rb                 |    24 -
 ruby/red-arrow/lib/arrow/time.rb                   |   159 -
 ruby/red-arrow/lib/arrow/time32-array-builder.rb   |    49 -
 ruby/red-arrow/lib/arrow/time32-array.rb           |    28 -
 ruby/red-arrow/lib/arrow/time32-data-type.rb       |    61 -
 ruby/red-arrow/lib/arrow/time64-array-builder.rb   |    49 -
 ruby/red-arrow/lib/arrow/time64-array.rb           |    28 -
 ruby/red-arrow/lib/arrow/time64-data-type.rb       |    61 -
 .../red-arrow/lib/arrow/timestamp-array-builder.rb |    65 -
 ruby/red-arrow/lib/arrow/timestamp-array.rb        |    42 -
 ruby/red-arrow/lib/arrow/timestamp-data-type.rb    |    57 -
 ruby/red-arrow/lib/arrow/version.rb                |    26 -
 ruby/red-arrow/lib/arrow/writable.rb               |    22 -
 ruby/red-arrow/red-arrow.gemspec                   |    66 -
 ruby/red-arrow/test/fixture/TestOrcFile.test1.orc  |   Bin 1711 -> 0 bytes
 ruby/red-arrow/test/fixture/float-integer.csv      |    20 -
 ruby/red-arrow/test/fixture/integer-float.csv      |    20 -
 .../test/fixture/null-with-double-quote.csv        |    20 -
 .../test/fixture/null-without-double-quote.csv     |    20 -
 ruby/red-arrow/test/fixture/with-header-float.csv  |    20 -
 ruby/red-arrow/test/fixture/with-header.csv        |    20 -
 .../test/fixture/without-header-float.csv          |    19 -
 ruby/red-arrow/test/fixture/without-header.csv     |    19 -
 ruby/red-arrow/test/helper.rb                      |    27 -
 ruby/red-arrow/test/helper/fixture.rb              |    28 -
 ruby/red-arrow/test/helper/omittable.rb            |    36 -
 .../test/raw-records/test-basic-arrays.rb          |   365 -
 .../test/raw-records/test-dense-union-array.rb     |   480 -
 ruby/red-arrow/test/raw-records/test-list-array.rb |   552 -
 .../test/raw-records/test-multiple-columns.rb      |    65 -
 .../test/raw-records/test-sparse-union-array.rb    |   470 -
 .../test/raw-records/test-struct-array.rb          |   470 -
 ruby/red-arrow/test/raw-records/test-table.rb      |    47 -
 ruby/red-arrow/test/run-test.rb                    |    71 -
 ruby/red-arrow/test/test-array-builder.rb          |   129 -
 ruby/red-arrow/test/test-array.rb                  |   291 -
 ruby/red-arrow/test/test-bigdecimal.rb             |    40 -
 ruby/red-arrow/test/test-buffer.rb                 |    49 -
 ruby/red-arrow/test/test-chunked-array.rb          |   183 -
 ruby/red-arrow/test/test-column.rb                 |    92 -
 ruby/red-arrow/test/test-csv-loader.rb             |   250 -
 ruby/red-arrow/test/test-data-type.rb              |    83 -
 ruby/red-arrow/test/test-date32-array.rb           |    24 -
 ruby/red-arrow/test/test-date64-array.rb           |    25 -
 .../test/test-decimal128-array-builder.rb          |   112 -
 ruby/red-arrow/test/test-decimal128-array.rb       |    38 -
 ruby/red-arrow/test/test-decimal128-data-type.rb   |    31 -
 ruby/red-arrow/test/test-decimal128.rb             |   102 -
 .../test/test-decimal256-array-builder.rb          |   112 -
 ruby/red-arrow/test/test-decimal256-array.rb       |    38 -
 ruby/red-arrow/test/test-decimal256-data-type.rb   |    31 -
 ruby/red-arrow/test/test-decimal256.rb             |   102 -
 ruby/red-arrow/test/test-dense-union-data-type.rb  |    41 -
 ruby/red-arrow/test/test-dictionary-array.rb       |    41 -
 ruby/red-arrow/test/test-dictionary-data-type.rb   |    40 -
 ruby/red-arrow/test/test-feather.rb                |    49 -
 ruby/red-arrow/test/test-field.rb                  |    91 -
 ruby/red-arrow/test/test-file-output-stream.rb     |    54 -
 .../test/test-fixed-size-binary-array-builder.rb   |    92 -
 .../red-arrow/test/test-fixed-size-binary-array.rb |    36 -
 ruby/red-arrow/test/test-group.rb                  |   156 -
 ruby/red-arrow/test/test-list-array-builder.rb     |    79 -
 ruby/red-arrow/test/test-list-array.rb             |    32 -
 ruby/red-arrow/test/test-list-data-type.rb         |    69 -
 ruby/red-arrow/test/test-null-array.rb             |    23 -
 ruby/red-arrow/test/test-orc.rb                    |   177 -
 ruby/red-arrow/test/test-record-batch-builder.rb   |   125 -
 .../test/test-record-batch-file-reader.rb          |   115 -
 ruby/red-arrow/test/test-record-batch-iterator.rb  |    37 -
 ruby/red-arrow/test/test-record-batch.rb           |   140 -
 ruby/red-arrow/test/test-rolling-window.rb         |    40 -
 ruby/red-arrow/test/test-schema.rb                 |   134 -
 ruby/red-arrow/test/test-slicer.rb                 |   488 -
 ruby/red-arrow/test/test-sort-indices.rb           |    40 -
 ruby/red-arrow/test/test-sort-key.rb               |    81 -
 ruby/red-arrow/test/test-sort-options.rb           |    58 -
 ruby/red-arrow/test/test-sparse-union-data-type.rb |    41 -
 ruby/red-arrow/test/test-struct-array-builder.rb   |   184 -
 ruby/red-arrow/test/test-struct-array.rb           |    94 -
 ruby/red-arrow/test/test-struct-data-type.rb       |   112 -
 ruby/red-arrow/test/test-table.rb                  |   788 -
 ruby/red-arrow/test/test-tensor.rb                 |    56 -
 ruby/red-arrow/test/test-time.rb                   |   288 -
 ruby/red-arrow/test/test-time32-array.rb           |    81 -
 ruby/red-arrow/test/test-time32-data-type.rb       |    42 -
 ruby/red-arrow/test/test-time64-array.rb           |    81 -
 ruby/red-arrow/test/test-time64-data-type.rb       |    42 -
 ruby/red-arrow/test/test-timestamp-array.rb        |    45 -
 ruby/red-arrow/test/test-timestamp-data-type.rb    |    42 -
 ruby/red-arrow/test/values/test-basic-arrays.rb    |   295 -
 .../test/values/test-dense-union-array.rb          |   468 -
 ruby/red-arrow/test/values/test-list-array.rb      |   515 -
 .../test/values/test-sparse-union-array.rb         |   459 -
 ruby/red-arrow/test/values/test-struct-array.rb    |   467 -
 ruby/red-gandiva/.gitignore                        |    18 -
 ruby/red-gandiva/Gemfile                           |    24 -
 ruby/red-gandiva/LICENSE.txt                       |   202 -
 ruby/red-gandiva/NOTICE.txt                        |     2 -
 ruby/red-gandiva/README.md                         |    68 -
 ruby/red-gandiva/Rakefile                          |    41 -
 ruby/red-gandiva/dependency-check/Rakefile         |    47 -
 ruby/red-gandiva/lib/gandiva.rb                    |    29 -
 ruby/red-gandiva/lib/gandiva/arrow-schema.rb       |    25 -
 ruby/red-gandiva/lib/gandiva/expression-builder.rb |    45 -
 .../lib/gandiva/expression-builder/add.rb          |    40 -
 .../gandiva/expression-builder/binary-operation.rb |    38 -
 .../lib/gandiva/expression-builder/context.rb      |    26 -
 .../lib/gandiva/expression-builder/divide.rb       |    34 -
 .../lib/gandiva/expression-builder/elsif.rb        |    36 -
 .../lib/gandiva/expression-builder/equal.rb        |    33 -
 .../lib/gandiva/expression-builder/field.rb        |    32 -
 .../lib/gandiva/expression-builder/greater-than.rb |    33 -
 .../lib/gandiva/expression-builder/if.rb           |    75 -
 .../lib/gandiva/expression-builder/less-than.rb    |    33 -
 .../lib/gandiva/expression-builder/literal.rb      |    65 -
 .../lib/gandiva/expression-builder/multiply.rb     |    34 -
 .../lib/gandiva/expression-builder/record.rb       |    45 -
 .../lib/gandiva/expression-builder/subtract.rb     |    34 -
 .../lib/gandiva/expression-builder/value.rb        |    55 -
 ruby/red-gandiva/lib/gandiva/loader.rb             |    49 -
 ruby/red-gandiva/lib/gandiva/version.rb            |    26 -
 ruby/red-gandiva/red-gandiva.gemspec               |    49 -
 .../test/expression-builder/test-add.rb            |    54 -
 .../test/expression-builder/test-record.rb         |    45 -
 ruby/red-gandiva/test/helper.rb                    |    20 -
 ruby/red-gandiva/test/run-test.rb                  |    50 -
 ruby/red-gandiva/test/test-boolean-literal-node.rb |    24 -
 ruby/red-gandiva/test/test-projector.rb            |    49 -
 ruby/red-parquet/.gitignore                        |    18 -
 ruby/red-parquet/Gemfile                           |    24 -
 ruby/red-parquet/LICENSE.txt                       |   202 -
 ruby/red-parquet/NOTICE.txt                        |     2 -
 ruby/red-parquet/README.md                         |    52 -
 ruby/red-parquet/Rakefile                          |    41 -
 ruby/red-parquet/dependency-check/Rakefile         |    47 -
 ruby/red-parquet/lib/parquet.rb                    |    29 -
 .../lib/parquet/arrow-table-loadable.rb            |    36 -
 .../red-parquet/lib/parquet/arrow-table-savable.rb |    52 -
 ruby/red-parquet/lib/parquet/loader.rb             |    46 -
 ruby/red-parquet/lib/parquet/version.rb            |    26 -
 ruby/red-parquet/lib/parquet/writer-properties.rb  |    28 -
 ruby/red-parquet/red-parquet.gemspec               |    49 -
 ruby/red-parquet/test/helper.rb                    |    22 -
 ruby/red-parquet/test/run-test.rb                  |    50 -
 ruby/red-parquet/test/test-arrow-table.rb          |    99 -
 ruby/red-plasma/.gitignore                         |    18 -
 ruby/red-plasma/Gemfile                            |    24 -
 ruby/red-plasma/LICENSE.txt                        |   202 -
 ruby/red-plasma/NOTICE.txt                         |     2 -
 ruby/red-plasma/README.md                          |    58 -
 ruby/red-plasma/Rakefile                           |    41 -
 ruby/red-plasma/dependency-check/Rakefile          |    47 -
 ruby/red-plasma/lib/plasma.rb                      |    29 -
 ruby/red-plasma/lib/plasma/client.rb               |    35 -
 ruby/red-plasma/lib/plasma/loader.rb               |    35 -
 ruby/red-plasma/lib/plasma/version.rb              |    26 -
 ruby/red-plasma/red-plasma.gemspec                 |    49 -
 ruby/red-plasma/test/helper.rb                     |    25 -
 ruby/red-plasma/test/helper/omittable.rb           |    36 -
 ruby/red-plasma/test/helper/plasma-store.rb        |    57 -
 ruby/red-plasma/test/run-test.rb                   |    50 -
 ruby/red-plasma/test/test-plasma-client.rb         |    53 -
 rust/Cargo.toml                                    |     5 +-
 rust/ballista/.dockerignore                        |    18 -
 rust/ballista/README.md                            |    64 -
 rust/ballista/dev/build-rust-base.sh               |    21 -
 rust/ballista/dev/build-rust.sh                    |    24 -
 rust/ballista/dev/integration-tests.sh             |    28 -
 rust/ballista/docker/README.md                     |    29 -
 rust/ballista/docker/rust-base.dockerfile          |    99 -
 rust/ballista/docker/rust.dockerfile               |    71 -
 rust/ballista/docs/README.md                       |    37 -
 rust/ballista/docs/architecture.md                 |    75 -
 rust/ballista/docs/dev-env-rust.md                 |    38 -
 rust/ballista/docs/images/query-execution.png      |   Bin 11378 -> 0 bytes
 rust/ballista/docs/integration-testing.md          |    32 -
 rust/ballista/docs/release-process.md              |    68 -
 rust/ballista/docs/rust-docker.md                  |    66 -
 rust/ballista/docs/user-guide/.gitignore           |     2 -
 rust/ballista/docs/user-guide/README.md            |    36 -
 rust/ballista/docs/user-guide/book.toml            |    23 -
 rust/ballista/docs/user-guide/src/SUMMARY.md       |    30 -
 rust/ballista/docs/user-guide/src/client-rust.md   |    22 -
 rust/ballista/docs/user-guide/src/clients.md       |    22 -
 rust/ballista/docs/user-guide/src/configuration.md |    32 -
 rust/ballista/docs/user-guide/src/deployment.md    |    26 -
 .../ballista/docs/user-guide/src/docker-compose.md |    55 -
 rust/ballista/docs/user-guide/src/faq.md           |    31 -
 .../user-guide/src/img/ballista-architecture.png   |   Bin 21225 -> 0 bytes
 rust/ballista/docs/user-guide/src/introduction.md  |    52 -
 rust/ballista/docs/user-guide/src/kubernetes.md    |   216 -
 rust/ballista/docs/user-guide/src/standalone.md    |    92 -
 rust/ballista/rust/.dockerignore                   |    23 -
 rust/ballista/rust/.gitignore                      |     2 -
 rust/ballista/rust/Cargo.toml                      |    30 -
 rust/ballista/rust/benchmarks/tpch/.dockerignore   |    25 -
 rust/ballista/rust/benchmarks/tpch/.gitignore      |     1 -
 rust/ballista/rust/benchmarks/tpch/Cargo.toml      |    36 -
 rust/ballista/rust/benchmarks/tpch/README.md       |   104 -
 .../rust/benchmarks/tpch/docker-compose.yaml       |    62 -
 rust/ballista/rust/benchmarks/tpch/entrypoint.sh   |    22 -
 rust/ballista/rust/benchmarks/tpch/queries/q1.sql  |    21 -
 rust/ballista/rust/benchmarks/tpch/queries/q10.sql |    31 -
 rust/ballista/rust/benchmarks/tpch/queries/q11.sql |    27 -
 rust/ballista/rust/benchmarks/tpch/queries/q12.sql |    30 -
 rust/ballista/rust/benchmarks/tpch/queries/q13.sql |    20 -
 rust/ballista/rust/benchmarks/tpch/queries/q14.sql |    13 -
 rust/ballista/rust/benchmarks/tpch/queries/q16.sql |    30 -
 rust/ballista/rust/benchmarks/tpch/queries/q17.sql |    17 -
 rust/ballista/rust/benchmarks/tpch/queries/q18.sql |    32 -
 rust/ballista/rust/benchmarks/tpch/queries/q19.sql |    35 -
 rust/ballista/rust/benchmarks/tpch/queries/q2.sql  |    43 -
 rust/ballista/rust/benchmarks/tpch/queries/q20.sql |    37 -
 rust/ballista/rust/benchmarks/tpch/queries/q21.sql |    39 -
 rust/ballista/rust/benchmarks/tpch/queries/q22.sql |    37 -
 rust/ballista/rust/benchmarks/tpch/queries/q3.sql  |    22 -
 rust/ballista/rust/benchmarks/tpch/queries/q4.sql  |    21 -
 rust/ballista/rust/benchmarks/tpch/queries/q5.sql  |    24 -
 rust/ballista/rust/benchmarks/tpch/queries/q6.sql  |     9 -
 rust/ballista/rust/benchmarks/tpch/queries/q7.sql  |    39 -
 rust/ballista/rust/benchmarks/tpch/queries/q8.sql  |    37 -
 rust/ballista/rust/benchmarks/tpch/queries/q9.sql  |    32 -
 rust/ballista/rust/benchmarks/tpch/run.sh          |    25 -
 rust/ballista/rust/benchmarks/tpch/src/main.rs     |   360 -
 rust/ballista/rust/benchmarks/tpch/tpch-gen.sh     |    33 -
 .../rust/benchmarks/tpch/tpchgen.dockerfile        |    32 -
 rust/ballista/rust/client/Cargo.toml               |    35 -
 rust/ballista/rust/client/README.md                |    22 -
 rust/ballista/rust/client/src/columnar_batch.rs    |   167 -
 rust/ballista/rust/client/src/context.rs           |   400 -
 rust/ballista/rust/client/src/lib.rs               |    20 -
 rust/ballista/rust/client/src/prelude.rs           |    23 -
 rust/ballista/rust/core/Cargo.toml                 |    50 -
 rust/ballista/rust/core/README.md                  |    21 -
 rust/ballista/rust/core/build.rs                   |    26 -
 rust/ballista/rust/core/proto/ballista.proto       |   824 -
 rust/ballista/rust/core/src/client.rs              |   224 -
 rust/ballista/rust/core/src/datasource.rs          |    72 -
 rust/ballista/rust/core/src/error.rs               |   172 -
 rust/ballista/rust/core/src/execution_plans/mod.rs |    27 -
 .../rust/core/src/execution_plans/query_stage.rs   |    92 -
 .../core/src/execution_plans/shuffle_reader.rs     |   106 -
 .../core/src/execution_plans/unresolved_shuffle.rs |   101 -
 rust/ballista/rust/core/src/lib.rs                 |    34 -
 rust/ballista/rust/core/src/memory_stream.rs       |    93 -
 .../rust/core/src/serde/logical_plan/from_proto.rs |  1200 -
 .../rust/core/src/serde/logical_plan/mod.rs        |   929 -
 .../rust/core/src/serde/logical_plan/to_proto.rs   |  1233 -
 rust/ballista/rust/core/src/serde/mod.rs           |    69 -
 .../core/src/serde/physical_plan/from_proto.rs     |   398 -
 .../rust/core/src/serde/physical_plan/mod.rs       |   178 -
 .../rust/core/src/serde/physical_plan/to_proto.rs  |   556 -
 .../rust/core/src/serde/scheduler/from_proto.rs    |   124 -
 rust/ballista/rust/core/src/serde/scheduler/mod.rs |   262 -
 .../rust/core/src/serde/scheduler/to_proto.rs      |    90 -
 rust/ballista/rust/core/src/utils.rs               |   327 -
 rust/ballista/rust/executor/Cargo.toml             |    59 -
 rust/ballista/rust/executor/README.md              |    31 -
 rust/ballista/rust/executor/build.rs               |    24 -
 .../executor/examples/example_executor_config.toml |    22 -
 .../rust/executor/executor_config_spec.toml        |    79 -
 rust/ballista/rust/executor/src/collect.rs         |   127 -
 rust/ballista/rust/executor/src/execution_loop.rs  |   172 -
 rust/ballista/rust/executor/src/flight_service.rs  |   374 -
 rust/ballista/rust/executor/src/lib.rs             |    52 -
 rust/ballista/rust/executor/src/main.rs            |   176 -
 rust/ballista/rust/scheduler/Cargo.toml            |    66 -
 rust/ballista/rust/scheduler/README.md             |    51 -
 rust/ballista/rust/scheduler/build.rs              |    24 -
 .../rust/scheduler/scheduler_config_spec.toml      |    60 -
 rust/ballista/rust/scheduler/src/api/handlers.rs   |    55 -
 rust/ballista/rust/scheduler/src/api/mod.rs        |    87 -
 rust/ballista/rust/scheduler/src/lib.rs            |   490 -
 rust/ballista/rust/scheduler/src/main.rs           |   156 -
 rust/ballista/rust/scheduler/src/planner.rs        |   494 -
 rust/ballista/rust/scheduler/src/state/etcd.rs     |   205 -
 rust/ballista/rust/scheduler/src/state/mod.rs      |   880 -
 .../rust/scheduler/src/state/standalone.rs         |   228 -
 rust/ballista/rust/scheduler/src/test_utils.rs     |   148 -
 .../rust/scheduler/testdata/customer/customer.tbl  |    10 -
 .../scheduler/testdata/lineitem/partition0.tbl     |    10 -
 .../scheduler/testdata/lineitem/partition1.tbl     |    10 -
 .../rust/scheduler/testdata/nation/nation.tbl      |    10 -
 .../rust/scheduler/testdata/orders/orders.tbl      |    10 -
 .../ballista/rust/scheduler/testdata/part/part.tbl |    10 -
 .../rust/scheduler/testdata/partsupp/partsupp.tbl  |    10 -
 .../rust/scheduler/testdata/region/region.tbl      |     5 -
 .../rust/scheduler/testdata/supplier/supplier.tbl  |    10 -
 rust/ballista/ui/scheduler/.gitignore              |    23 -
 rust/ballista/ui/scheduler/README.md               |    45 -
 rust/ballista/ui/scheduler/index.d.ts              |    18 -
 rust/ballista/ui/scheduler/package.json            |    58 -
 rust/ballista/ui/scheduler/public/favicon.ico      |   Bin 3870 -> 0 bytes
 rust/ballista/ui/scheduler/public/index.html       |    62 -
 rust/ballista/ui/scheduler/public/logo192.png      |   Bin 5347 -> 0 bytes
 rust/ballista/ui/scheduler/public/logo512.png      |   Bin 9664 -> 0 bytes
 rust/ballista/ui/scheduler/public/manifest.json    |    25 -
 rust/ballista/ui/scheduler/public/robots.txt       |    20 -
 rust/ballista/ui/scheduler/react-table-config.d.ts |   137 -
 rust/ballista/ui/scheduler/src/App.css             |    18 -
 rust/ballista/ui/scheduler/src/App.test.tsx        |    26 -
 rust/ballista/ui/scheduler/src/App.tsx             |    97 -
 .../ui/scheduler/src/components/DataTable.tsx      |   131 -
 .../ballista/ui/scheduler/src/components/Empty.tsx |    36 -
 .../ui/scheduler/src/components/Footer.tsx         |    28 -
 .../ui/scheduler/src/components/Header.tsx         |    82 -
 .../ui/scheduler/src/components/NodesList.tsx      |    71 -
 .../ui/scheduler/src/components/QueriesList.tsx    |   115 -
 .../ui/scheduler/src/components/Summary.tsx        |    89 -
 rust/ballista/ui/scheduler/src/components/logo.svg |    25 -
 rust/ballista/ui/scheduler/src/index.css           |    32 -
 rust/ballista/ui/scheduler/src/index.tsx           |    38 -
 rust/ballista/ui/scheduler/src/react-app-env.d.ts  |    18 -
 rust/ballista/ui/scheduler/src/reportWebVitals.ts  |    32 -
 rust/ballista/ui/scheduler/src/setupTests.ts       |    22 -
 rust/ballista/ui/scheduler/tsconfig.json           |    28 -
 rust/ballista/ui/scheduler/yarn.lock               | 12431 -------
 rust/benchmarks/Cargo.toml                         |    42 -
 rust/benchmarks/README.md                          |   120 -
 rust/benchmarks/src/bin/nyctaxi.rs                 |   151 -
 rust/benchmarks/src/bin/tpch.rs                    |  1692 -
 rust/datafusion-examples/Cargo.toml                |    39 -
 rust/datafusion-examples/examples/README.md        |    28 -
 rust/datafusion-examples/examples/csv_sql.rs       |    52 -
 rust/datafusion-examples/examples/dataframe.rs     |    47 -
 .../examples/dataframe_in_memory.rs                |    67 -
 rust/datafusion-examples/examples/flight_client.rs |    79 -
 rust/datafusion-examples/examples/flight_server.rs |   213 -
 rust/datafusion-examples/examples/parquet_sql.rs   |    50 -
 rust/datafusion-examples/examples/simple_udaf.rs   |   170 -
 rust/datafusion-examples/examples/simple_udf.rs    |   151 -
 rust/datafusion/Cargo.toml                         |    99 -
 rust/datafusion/DEVELOPERS.md                      |    92 -
 rust/datafusion/Dockerfile                         |    25 -
 rust/datafusion/README.md                          |   356 -
 rust/datafusion/benches/aggregate_query_sql.rs     |   248 -
 rust/datafusion/benches/filter_query_sql.rs        |    91 -
 rust/datafusion/benches/math_query_sql.rs          |   111 -
 rust/datafusion/benches/scalar.rs                  |    30 -
 rust/datafusion/benches/sort_limit_query_sql.rs    |   148 -
 rust/datafusion/docs/cli.md                        |    95 -
 .../docs/images/DataFusion-Logo-Dark.png           |   Bin 20134 -> 0 bytes
 .../docs/images/DataFusion-Logo-Dark.svg           |     1 -
 .../docs/images/DataFusion-Logo-Light.png          |   Bin 19102 -> 0 bytes
 .../docs/images/DataFusion-Logo-Light.svg          |     1 -
 rust/datafusion/src/bin/main.rs                    |    25 -
 rust/datafusion/src/bin/repl.rs                    |   140 -
 rust/datafusion/src/catalog/catalog.rs             |   139 -
 rust/datafusion/src/catalog/information_schema.rs  |   492 -
 rust/datafusion/src/catalog/mod.rs                 |   146 -
 rust/datafusion/src/catalog/schema.rs              |   104 -
 rust/datafusion/src/dataframe.rs                   |   286 -
 rust/datafusion/src/datasource/csv.rs              |   144 -
 rust/datafusion/src/datasource/datasource.rs       |   103 -
 rust/datafusion/src/datasource/empty.rs            |    80 -
 rust/datafusion/src/datasource/memory.rs           |   472 -
 rust/datafusion/src/datasource/mod.rs              |    28 -
 rust/datafusion/src/datasource/parquet.rs          |   373 -
 rust/datafusion/src/error.rs                       |   120 -
 rust/datafusion/src/execution/context.rs           |  3123 --
 rust/datafusion/src/execution/dataframe_impl.rs    |   374 -
 rust/datafusion/src/execution/mod.rs               |    21 -
 rust/datafusion/src/lib.rs                         |   211 -
 rust/datafusion/src/logical_plan/builder.rs        |   595 -
 rust/datafusion/src/logical_plan/dfschema.rs       |   521 -
 rust/datafusion/src/logical_plan/display.rs        |   270 -
 rust/datafusion/src/logical_plan/expr.rs           |  1505 -
 rust/datafusion/src/logical_plan/extension.rs      |    79 -
 rust/datafusion/src/logical_plan/mod.rs            |    50 -
 rust/datafusion/src/logical_plan/operators.rs      |   135 -
 rust/datafusion/src/logical_plan/plan.rs           |  1095 -
 rust/datafusion/src/logical_plan/registry.rs       |    34 -
 rust/datafusion/src/optimizer/constant_folding.rs  |   591 -
 rust/datafusion/src/optimizer/filter_push_down.rs  |  1021 -
 .../src/optimizer/hash_build_probe_order.rs        |   257 -
 rust/datafusion/src/optimizer/limit_push_down.rs   |   252 -
 rust/datafusion/src/optimizer/mod.rs               |    27 -
 rust/datafusion/src/optimizer/optimizer.rs         |    32 -
 .../src/optimizer/projection_push_down.rs          |   542 -
 rust/datafusion/src/optimizer/utils.rs             |   489 -
 .../src/physical_optimizer/coalesce_batches.rs     |    88 -
 .../src/physical_optimizer/merge_exec.rs           |    74 -
 rust/datafusion/src/physical_optimizer/mod.rs      |    24 -
 .../datafusion/src/physical_optimizer/optimizer.rs |    39 -
 .../src/physical_optimizer/repartition.rs          |   186 -
 rust/datafusion/src/physical_plan/aggregates.rs    |   258 -
 .../src/physical_plan/array_expressions.rs         |   127 -
 .../src/physical_plan/coalesce_batches.rs          |   316 -
 rust/datafusion/src/physical_plan/common.rs        |   104 -
 .../src/physical_plan/crypto_expressions.rs        |   198 -
 rust/datafusion/src/physical_plan/csv.rs           |   401 -
 .../src/physical_plan/datetime_expressions.rs      |   559 -
 .../src/physical_plan/distinct_expressions.rs      |   557 -
 rust/datafusion/src/physical_plan/empty.rs         |   186 -
 rust/datafusion/src/physical_plan/explain.rs       |   125 -
 .../src/physical_plan/expressions/average.rs       |   293 -
 .../src/physical_plan/expressions/binary.rs        |  1101 -
 .../src/physical_plan/expressions/case.rs          |   597 -
 .../src/physical_plan/expressions/cast.rs          |   301 -
 .../src/physical_plan/expressions/coercion.rs      |   208 -
 .../src/physical_plan/expressions/column.rs        |    86 -
 .../src/physical_plan/expressions/count.rs         |   235 -
 .../src/physical_plan/expressions/in_list.rs       |   458 -
 .../src/physical_plan/expressions/is_not_null.rs   |   119 -
 .../src/physical_plan/expressions/is_null.rs       |   119 -
 .../src/physical_plan/expressions/literal.rs       |   108 -
 .../src/physical_plan/expressions/min_max.rs       |   655 -
 .../src/physical_plan/expressions/mod.rs           |   135 -
 .../src/physical_plan/expressions/negative.rs      |   133 -
 .../src/physical_plan/expressions/not.rs           |   158 -
 .../src/physical_plan/expressions/nullif.rs        |   188 -
 .../src/physical_plan/expressions/sum.rs           |   373 -
 .../src/physical_plan/expressions/try_cast.rs      |   247 -
 rust/datafusion/src/physical_plan/filter.rs        |   240 -
 rust/datafusion/src/physical_plan/functions.rs     |  3767 ---
 rust/datafusion/src/physical_plan/group_scalar.rs  |   212 -
 .../datafusion/src/physical_plan/hash_aggregate.rs |  1395 -
 rust/datafusion/src/physical_plan/hash_join.rs     |  1265 -
 rust/datafusion/src/physical_plan/hash_utils.rs    |   201 -
 rust/datafusion/src/physical_plan/limit.rs         |   338 -
 .../src/physical_plan/math_expressions.rs          |   118 -
 rust/datafusion/src/physical_plan/memory.rs        |   161 -
 rust/datafusion/src/physical_plan/merge.rs         |   225 -
 rust/datafusion/src/physical_plan/mod.rs           |   369 -
 rust/datafusion/src/physical_plan/parquet.rs       |  1535 -
 rust/datafusion/src/physical_plan/planner.rs       |  1106 -
 rust/datafusion/src/physical_plan/projection.rs    |   232 -
 .../src/physical_plan/regex_expressions.rs         |   172 -
 rust/datafusion/src/physical_plan/repartition.rs   |   461 -
 rust/datafusion/src/physical_plan/sort.rs          |   478 -
 .../src/physical_plan/string_expressions.rs        |   595 -
 rust/datafusion/src/physical_plan/type_coercion.rs |   361 -
 rust/datafusion/src/physical_plan/udaf.rs          |   168 -
 rust/datafusion/src/physical_plan/udf.rs           |   112 -
 .../src/physical_plan/unicode_expressions.rs       |   532 -
 rust/datafusion/src/physical_plan/union.rs         |   143 -
 rust/datafusion/src/prelude.rs                     |    37 -
 rust/datafusion/src/scalar.rs                      |   821 -
 rust/datafusion/src/sql/mod.rs                     |    23 -
 rust/datafusion/src/sql/parser.rs                  |   380 -
 rust/datafusion/src/sql/planner.rs                 |  2723 --
 rust/datafusion/src/sql/utils.rs                   |   376 -
 rust/datafusion/src/test/exec.rs                   |   102 -
 rust/datafusion/src/test/mod.rs                    |   346 -
 rust/datafusion/src/test/user_defined.rs           |    76 -
 rust/datafusion/src/test/variable.rs               |    58 -
 rust/datafusion/src/variable/mod.rs                |    36 -
 rust/datafusion/tests/aggregate_simple.csv         |    16 -
 rust/datafusion/tests/custom_sources.rs            |   200 -
 rust/datafusion/tests/customer.csv                 |     4 -
 rust/datafusion/tests/dataframe.rs                 |    79 -
 rust/datafusion/tests/example.csv                  |     2 -
 rust/datafusion/tests/provider_filter_pushdown.rs  |   177 -
 rust/datafusion/tests/sql.rs                       |  2707 --
 rust/datafusion/tests/user_defined_plan.rs         |   512 -
 4883 files changed, 1 insertion(+), 1190946 deletions(-)
 delete mode 100644 c_glib/.gitignore
 delete mode 100644 c_glib/Brewfile
 delete mode 100644 c_glib/Gemfile
 delete mode 100644 c_glib/README.md
 delete mode 100644 c_glib/arrow-cuda-glib/arrow-cuda-glib.h
 delete mode 100644 c_glib/arrow-cuda-glib/arrow-cuda-glib.hpp
 delete mode 100644 c_glib/arrow-cuda-glib/cuda.cpp
 delete mode 100644 c_glib/arrow-cuda-glib/cuda.h
 delete mode 100644 c_glib/arrow-cuda-glib/cuda.hpp
 delete mode 100644 c_glib/arrow-cuda-glib/meson.build
 delete mode 100644 c_glib/arrow-dataset-glib/arrow-dataset-glib.h
 delete mode 100644 c_glib/arrow-dataset-glib/arrow-dataset-glib.hpp
 delete mode 100644 c_glib/arrow-dataset-glib/file-format.cpp
 delete mode 100644 c_glib/arrow-dataset-glib/file-format.h
 delete mode 100644 c_glib/arrow-dataset-glib/file-format.hpp
 delete mode 100644 c_glib/arrow-dataset-glib/fragment.cpp
 delete mode 100644 c_glib/arrow-dataset-glib/fragment.h
 delete mode 100644 c_glib/arrow-dataset-glib/fragment.hpp
 delete mode 100644 c_glib/arrow-dataset-glib/meson.build
 delete mode 100644 c_glib/arrow-dataset-glib/scanner.cpp
 delete mode 100644 c_glib/arrow-dataset-glib/scanner.h
 delete mode 100644 c_glib/arrow-dataset-glib/scanner.hpp
 delete mode 100644 c_glib/arrow-glib/array-builder.cpp
 delete mode 100644 c_glib/arrow-glib/array-builder.h
 delete mode 100644 c_glib/arrow-glib/array-builder.hpp
 delete mode 100644 c_glib/arrow-glib/array.h
 delete mode 100644 c_glib/arrow-glib/array.hpp
 delete mode 100644 c_glib/arrow-glib/arrow-glib.h
 delete mode 100644 c_glib/arrow-glib/arrow-glib.hpp
 delete mode 100644 c_glib/arrow-glib/basic-array.cpp
 delete mode 100644 c_glib/arrow-glib/basic-array.h
 delete mode 100644 c_glib/arrow-glib/basic-array.hpp
 delete mode 100644 c_glib/arrow-glib/basic-data-type.cpp
 delete mode 100644 c_glib/arrow-glib/basic-data-type.h
 delete mode 100644 c_glib/arrow-glib/basic-data-type.hpp
 delete mode 100644 c_glib/arrow-glib/buffer.cpp
 delete mode 100644 c_glib/arrow-glib/buffer.h
 delete mode 100644 c_glib/arrow-glib/buffer.hpp
 delete mode 100644 c_glib/arrow-glib/chunked-array.cpp
 delete mode 100644 c_glib/arrow-glib/chunked-array.h
 delete mode 100644 c_glib/arrow-glib/chunked-array.hpp
 delete mode 100644 c_glib/arrow-glib/codec.cpp
 delete mode 100644 c_glib/arrow-glib/codec.h
 delete mode 100644 c_glib/arrow-glib/codec.hpp
 delete mode 100644 c_glib/arrow-glib/composite-array.cpp
 delete mode 100644 c_glib/arrow-glib/composite-array.h
 delete mode 100644 c_glib/arrow-glib/composite-data-type.cpp
 delete mode 100644 c_glib/arrow-glib/composite-data-type.h
 delete mode 100644 c_glib/arrow-glib/compute.cpp
 delete mode 100644 c_glib/arrow-glib/compute.h
 delete mode 100644 c_glib/arrow-glib/compute.hpp
 delete mode 100644 c_glib/arrow-glib/data-type.h
 delete mode 100644 c_glib/arrow-glib/data-type.hpp
 delete mode 100644 c_glib/arrow-glib/datum.cpp
 delete mode 100644 c_glib/arrow-glib/datum.h
 delete mode 100644 c_glib/arrow-glib/datum.hpp
 delete mode 100644 c_glib/arrow-glib/decimal.cpp
 delete mode 100644 c_glib/arrow-glib/decimal.h
 delete mode 100644 c_glib/arrow-glib/decimal.hpp
 delete mode 100644 c_glib/arrow-glib/enums.c.template
 delete mode 100644 c_glib/arrow-glib/enums.h.template
 delete mode 100644 c_glib/arrow-glib/error.cpp
 delete mode 100644 c_glib/arrow-glib/error.h
 delete mode 100644 c_glib/arrow-glib/error.hpp
 delete mode 100644 c_glib/arrow-glib/field.cpp
 delete mode 100644 c_glib/arrow-glib/field.h
 delete mode 100644 c_glib/arrow-glib/field.hpp
 delete mode 100644 c_glib/arrow-glib/file-mode.cpp
 delete mode 100644 c_glib/arrow-glib/file-mode.h
 delete mode 100644 c_glib/arrow-glib/file-mode.hpp
 delete mode 100644 c_glib/arrow-glib/file-system.cpp
 delete mode 100644 c_glib/arrow-glib/file-system.h
 delete mode 100644 c_glib/arrow-glib/file-system.hpp
 delete mode 100644 c_glib/arrow-glib/file.cpp
 delete mode 100644 c_glib/arrow-glib/file.h
 delete mode 100644 c_glib/arrow-glib/file.hpp
 delete mode 100644 c_glib/arrow-glib/gobject-type.h
 delete mode 100644 c_glib/arrow-glib/input-stream.cpp
 delete mode 100644 c_glib/arrow-glib/input-stream.h
 delete mode 100644 c_glib/arrow-glib/input-stream.hpp
 delete mode 100644 c_glib/arrow-glib/internal-hash-table.hpp
 delete mode 100644 c_glib/arrow-glib/internal-index.hpp
 delete mode 100644 c_glib/arrow-glib/ipc-options.cpp
 delete mode 100644 c_glib/arrow-glib/ipc-options.h
 delete mode 100644 c_glib/arrow-glib/ipc-options.hpp
 delete mode 100644 c_glib/arrow-glib/local-file-system.cpp
 delete mode 100644 c_glib/arrow-glib/local-file-system.h
 delete mode 100644 c_glib/arrow-glib/local-file-system.hpp
 delete mode 100644 c_glib/arrow-glib/meson.build
 delete mode 100644 c_glib/arrow-glib/metadata-version.cpp
 delete mode 100644 c_glib/arrow-glib/metadata-version.h
 delete mode 100644 c_glib/arrow-glib/metadata-version.hpp
 delete mode 100644 c_glib/arrow-glib/orc-file-reader.cpp
 delete mode 100644 c_glib/arrow-glib/orc-file-reader.h
 delete mode 100644 c_glib/arrow-glib/orc-file-reader.hpp
 delete mode 100644 c_glib/arrow-glib/output-stream.cpp
 delete mode 100644 c_glib/arrow-glib/output-stream.h
 delete mode 100644 c_glib/arrow-glib/output-stream.hpp
 delete mode 100644 c_glib/arrow-glib/readable.cpp
 delete mode 100644 c_glib/arrow-glib/readable.h
 delete mode 100644 c_glib/arrow-glib/readable.hpp
 delete mode 100644 c_glib/arrow-glib/reader.cpp
 delete mode 100644 c_glib/arrow-glib/reader.h
 delete mode 100644 c_glib/arrow-glib/reader.hpp
 delete mode 100644 c_glib/arrow-glib/record-batch.cpp
 delete mode 100644 c_glib/arrow-glib/record-batch.h
 delete mode 100644 c_glib/arrow-glib/record-batch.hpp
 delete mode 100644 c_glib/arrow-glib/schema.cpp
 delete mode 100644 c_glib/arrow-glib/schema.h
 delete mode 100644 c_glib/arrow-glib/schema.hpp
 delete mode 100644 c_glib/arrow-glib/table-builder.cpp
 delete mode 100644 c_glib/arrow-glib/table-builder.h
 delete mode 100644 c_glib/arrow-glib/table-builder.hpp
 delete mode 100644 c_glib/arrow-glib/table.cpp
 delete mode 100644 c_glib/arrow-glib/table.h
 delete mode 100644 c_glib/arrow-glib/table.hpp
 delete mode 100644 c_glib/arrow-glib/tensor.cpp
 delete mode 100644 c_glib/arrow-glib/tensor.h
 delete mode 100644 c_glib/arrow-glib/tensor.hpp
 delete mode 100644 c_glib/arrow-glib/type.cpp
 delete mode 100644 c_glib/arrow-glib/type.h
 delete mode 100644 c_glib/arrow-glib/type.hpp
 delete mode 100644 c_glib/arrow-glib/version.h.in
 delete mode 100644 c_glib/arrow-glib/writable-file.cpp
 delete mode 100644 c_glib/arrow-glib/writable-file.h
 delete mode 100644 c_glib/arrow-glib/writable-file.hpp
 delete mode 100644 c_glib/arrow-glib/writable.cpp
 delete mode 100644 c_glib/arrow-glib/writable.h
 delete mode 100644 c_glib/arrow-glib/writable.hpp
 delete mode 100644 c_glib/arrow-glib/writer.cpp
 delete mode 100644 c_glib/arrow-glib/writer.h
 delete mode 100644 c_glib/arrow-glib/writer.hpp
 delete mode 100644 c_glib/doc/arrow-dataset-glib/arrow-dataset-glib-docs.xml
 delete mode 100644 c_glib/doc/arrow-dataset-glib/entities.xml.in
 delete mode 100644 c_glib/doc/arrow-dataset-glib/meson.build
 delete mode 100644 c_glib/doc/arrow-glib/arrow-glib-docs.xml
 delete mode 100644 c_glib/doc/arrow-glib/entities.xml.in
 delete mode 100644 c_glib/doc/arrow-glib/meson.build
 delete mode 100644 c_glib/doc/gandiva-glib/entities.xml.in
 delete mode 100644 c_glib/doc/gandiva-glib/gandiva-glib-docs.xml
 delete mode 100644 c_glib/doc/gandiva-glib/meson.build
 delete mode 100644 c_glib/doc/parquet-glib/entities.xml.in
 delete mode 100644 c_glib/doc/parquet-glib/meson.build
 delete mode 100644 c_glib/doc/parquet-glib/parquet-glib-docs.xml
 delete mode 100644 c_glib/doc/plasma-glib/entities.xml.in
 delete mode 100644 c_glib/doc/plasma-glib/meson.build
 delete mode 100644 c_glib/doc/plasma-glib/plasma-glib-docs.xml
 delete mode 100644 c_glib/example/README.md
 delete mode 100644 c_glib/example/build.c
 delete mode 100644 c_glib/example/extension-type.c
 delete mode 100644 c_glib/example/lua/README.md
 delete mode 100644 c_glib/example/lua/meson.build
 delete mode 100644 c_glib/example/lua/read-batch.lua
 delete mode 100644 c_glib/example/lua/read-stream.lua
 delete mode 100644 c_glib/example/lua/write-batch.lua
 delete mode 100644 c_glib/example/lua/write-stream.lua
 delete mode 100644 c_glib/example/meson.build
 delete mode 100644 c_glib/example/read-batch.c
 delete mode 100644 c_glib/example/read-stream.c
 delete mode 100644 c_glib/gandiva-glib/enums.c.template
 delete mode 100644 c_glib/gandiva-glib/enums.h.template
 delete mode 100644 c_glib/gandiva-glib/expression.cpp
 delete mode 100644 c_glib/gandiva-glib/expression.h
 delete mode 100644 c_glib/gandiva-glib/expression.hpp
 delete mode 100644 c_glib/gandiva-glib/filter.cpp
 delete mode 100644 c_glib/gandiva-glib/filter.h
 delete mode 100644 c_glib/gandiva-glib/filter.hpp
 delete mode 100644 c_glib/gandiva-glib/function-registry.cpp
 delete mode 100644 c_glib/gandiva-glib/function-registry.h
 delete mode 100644 c_glib/gandiva-glib/function-signature.cpp
 delete mode 100644 c_glib/gandiva-glib/function-signature.h
 delete mode 100644 c_glib/gandiva-glib/function-signature.hpp
 delete mode 100644 c_glib/gandiva-glib/gandiva-glib.h
 delete mode 100644 c_glib/gandiva-glib/gandiva-glib.hpp
 delete mode 100644 c_glib/gandiva-glib/meson.build
 delete mode 100644 c_glib/gandiva-glib/native-function.cpp
 delete mode 100644 c_glib/gandiva-glib/native-function.h
 delete mode 100644 c_glib/gandiva-glib/native-function.hpp
 delete mode 100644 c_glib/gandiva-glib/node.cpp
 delete mode 100644 c_glib/gandiva-glib/node.h
 delete mode 100644 c_glib/gandiva-glib/node.hpp
 delete mode 100644 c_glib/gandiva-glib/projector.cpp
 delete mode 100644 c_glib/gandiva-glib/projector.h
 delete mode 100644 c_glib/gandiva-glib/projector.hpp
 delete mode 100644 c_glib/gandiva-glib/selection-vector.cpp
 delete mode 100644 c_glib/gandiva-glib/selection-vector.h
 delete mode 100644 c_glib/gandiva-glib/selection-vector.hpp
 delete mode 100644 c_glib/gandiva-glib/version.h.in
 delete mode 100644 c_glib/meson.build
 delete mode 100644 c_glib/meson_options.txt
 delete mode 100644 c_glib/parquet-glib/arrow-file-reader.cpp
 delete mode 100644 c_glib/parquet-glib/arrow-file-reader.h
 delete mode 100644 c_glib/parquet-glib/arrow-file-reader.hpp
 delete mode 100644 c_glib/parquet-glib/arrow-file-writer.cpp
 delete mode 100644 c_glib/parquet-glib/arrow-file-writer.h
 delete mode 100644 c_glib/parquet-glib/arrow-file-writer.hpp
 delete mode 100644 c_glib/parquet-glib/meson.build
 delete mode 100644 c_glib/parquet-glib/parquet-glib.h
 delete mode 100644 c_glib/parquet-glib/parquet-glib.hpp
 delete mode 100644 c_glib/parquet-glib/version.h.in
 delete mode 100644 c_glib/plasma-glib/client.cpp
 delete mode 100644 c_glib/plasma-glib/client.h
 delete mode 100644 c_glib/plasma-glib/client.hpp
 delete mode 100644 c_glib/plasma-glib/meson.build
 delete mode 100644 c_glib/plasma-glib/object.cpp
 delete mode 100644 c_glib/plasma-glib/object.h
 delete mode 100644 c_glib/plasma-glib/object.hpp
 delete mode 100644 c_glib/plasma-glib/plasma-glib.h
 delete mode 100644 c_glib/plasma-glib/plasma-glib.hpp
 delete mode 100644 c_glib/test/dataset/test-file-format.rb
 delete mode 100644 c_glib/test/dataset/test-in-memory-scan-task.rb
 delete mode 100644 c_glib/test/dataset/test-scan-options.rb
 delete mode 100644 c_glib/test/file-system-tests.rb
 delete mode 100644 c_glib/test/fixture/TestOrcFile.test1.orc
 delete mode 100644 c_glib/test/gandiva/test-binary-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-boolean-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-boolean-node.rb
 delete mode 100644 c_glib/test/gandiva/test-condition.rb
 delete mode 100644 c_glib/test/gandiva/test-double-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-expression.rb
 delete mode 100644 c_glib/test/gandiva/test-field-node.rb
 delete mode 100644 c_glib/test/gandiva/test-filter.rb
 delete mode 100644 c_glib/test/gandiva/test-float-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-function-node.rb
 delete mode 100644 c_glib/test/gandiva/test-function-registry.rb
 delete mode 100644 c_glib/test/gandiva/test-function-signature.rb
 delete mode 100644 c_glib/test/gandiva/test-if-node.rb
 delete mode 100644 c_glib/test/gandiva/test-int16-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-int32-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-int64-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-int8-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-native-function.rb
 delete mode 100644 c_glib/test/gandiva/test-null-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-projector.rb
 delete mode 100644 c_glib/test/gandiva/test-selectable-projector.rb
 delete mode 100644 c_glib/test/gandiva/test-selection-vector.rb
 delete mode 100644 c_glib/test/gandiva/test-string-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-uint16-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-uint32-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-uint64-literal-node.rb
 delete mode 100644 c_glib/test/gandiva/test-uint8-literal-node.rb
 delete mode 100644 c_glib/test/helper/buildable.rb
 delete mode 100644 c_glib/test/helper/data-type.rb
 delete mode 100644 c_glib/test/helper/fixture.rb
 delete mode 100644 c_glib/test/helper/omittable.rb
 delete mode 100644 c_glib/test/helper/plasma-store.rb
 delete mode 100644 c_glib/test/parquet/test-arrow-file-reader.rb
 delete mode 100644 c_glib/test/parquet/test-arrow-file-writer.rb
 delete mode 100644 c_glib/test/parquet/test-writer-properties.rb
 delete mode 100644 c_glib/test/plasma/test-plasma-client-options.rb
 delete mode 100644 c_glib/test/plasma/test-plasma-client.rb
 delete mode 100644 c_glib/test/plasma/test-plasma-created-object.rb
 delete mode 100644 c_glib/test/plasma/test-plasma-referred-object.rb
 delete mode 100755 c_glib/test/run-test.rb
 delete mode 100755 c_glib/test/run-test.sh
 delete mode 100644 c_glib/test/test-array-builder.rb
 delete mode 100644 c_glib/test/test-array-datum.rb
 delete mode 100644 c_glib/test/test-array-sort-options.rb
 delete mode 100644 c_glib/test/test-array.rb
 delete mode 100644 c_glib/test/test-binary-array.rb
 delete mode 100644 c_glib/test/test-binary-data-type.rb
 delete mode 100644 c_glib/test/test-boolean-array.rb
 delete mode 100644 c_glib/test/test-boolean-data-type.rb
 delete mode 100644 c_glib/test/test-buffer-input-stream.rb
 delete mode 100644 c_glib/test/test-buffer-output-stream.rb
 delete mode 100644 c_glib/test/test-buffer.rb
 delete mode 100644 c_glib/test/test-cast.rb
 delete mode 100644 c_glib/test/test-chunked-array-datum.rb
 delete mode 100644 c_glib/test/test-chunked-array.rb
 delete mode 100644 c_glib/test/test-codec.rb
 delete mode 100644 c_glib/test/test-compare.rb
 delete mode 100644 c_glib/test/test-compressed-input-stream.rb
 delete mode 100644 c_glib/test/test-compressed-output-stream.rb
 delete mode 100644 c_glib/test/test-count-values.rb
 delete mode 100644 c_glib/test/test-count.rb
 delete mode 100644 c_glib/test/test-csv-reader.rb
 delete mode 100644 c_glib/test/test-cuda.rb
 delete mode 100644 c_glib/test/test-date32-array.rb
 delete mode 100644 c_glib/test/test-date32-data-type.rb
 delete mode 100644 c_glib/test/test-date64-array.rb
 delete mode 100644 c_glib/test/test-date64-data-type.rb
 delete mode 100644 c_glib/test/test-decimal128-array.rb
 delete mode 100644 c_glib/test/test-decimal128-data-type.rb
 delete mode 100644 c_glib/test/test-decimal128.rb
 delete mode 100644 c_glib/test/test-decimal256-array.rb
 delete mode 100644 c_glib/test/test-decimal256-data-type.rb
 delete mode 100644 c_glib/test/test-decimal256.rb
 delete mode 100644 c_glib/test/test-dense-union-array.rb
 delete mode 100644 c_glib/test/test-dense-union-data-type.rb
 delete mode 100644 c_glib/test/test-dictionary-array-builder.rb
 delete mode 100644 c_glib/test/test-dictionary-array.rb
 delete mode 100644 c_glib/test/test-dictionary-data-type.rb
 delete mode 100644 c_glib/test/test-dictionary-encode.rb
 delete mode 100644 c_glib/test/test-double-array.rb
 delete mode 100644 c_glib/test/test-double-data-type.rb
 delete mode 100644 c_glib/test/test-extension-data-type.rb
 delete mode 100644 c_glib/test/test-feather-file-reader.rb
 delete mode 100644 c_glib/test/test-field.rb
 delete mode 100644 c_glib/test/test-file-info.rb
 delete mode 100644 c_glib/test/test-file-output-stream.rb
 delete mode 100644 c_glib/test/test-file-selector.rb
 delete mode 100644 c_glib/test/test-file-writer.rb
 delete mode 100644 c_glib/test/test-filter.rb
 delete mode 100644 c_glib/test/test-fixed-size-binary-array.rb
 delete mode 100644 c_glib/test/test-fixed-size-binary-data-type.rb
 delete mode 100644 c_glib/test/test-float-array.rb
 delete mode 100644 c_glib/test/test-float-data-type.rb
 delete mode 100644 c_glib/test/test-function.rb
 delete mode 100644 c_glib/test/test-gio-input-stream.rb
 delete mode 100644 c_glib/test/test-gio-output-stream.rb
 delete mode 100644 c_glib/test/test-int-array-builder.rb
 delete mode 100644 c_glib/test/test-int16-array.rb
 delete mode 100644 c_glib/test/test-int16-data-type.rb
 delete mode 100644 c_glib/test/test-int32-array.rb
 delete mode 100644 c_glib/test/test-int32-data-type.rb
 delete mode 100644 c_glib/test/test-int64-array.rb
 delete mode 100644 c_glib/test/test-int64-data-type.rb
 delete mode 100644 c_glib/test/test-int8-array.rb
 delete mode 100644 c_glib/test/test-int8-data-type.rb
 delete mode 100644 c_glib/test/test-is-in.rb
 delete mode 100644 c_glib/test/test-json-reader.rb
 delete mode 100644 c_glib/test/test-large-binary-array.rb
 delete mode 100644 c_glib/test/test-large-binary-data-type.rb
 delete mode 100644 c_glib/test/test-large-list-array.rb
 delete mode 100644 c_glib/test/test-large-list-data-type.rb
 delete mode 100644 c_glib/test/test-large-string-array.rb
 delete mode 100644 c_glib/test/test-large-string-data-type.rb
 delete mode 100644 c_glib/test/test-list-array.rb
 delete mode 100644 c_glib/test/test-list-data-type.rb
 delete mode 100644 c_glib/test/test-local-file-system.rb
 delete mode 100644 c_glib/test/test-map-array-builder.rb
 delete mode 100644 c_glib/test/test-map-array.rb
 delete mode 100644 c_glib/test/test-map-data-type.rb
 delete mode 100644 c_glib/test/test-memory-mapped-input-stream.rb
 delete mode 100644 c_glib/test/test-mock-file-system.rb
 delete mode 100644 c_glib/test/test-mutable-buffer.rb
 delete mode 100644 c_glib/test/test-null-array.rb
 delete mode 100644 c_glib/test/test-null-data-type.rb
 delete mode 100644 c_glib/test/test-numeric-array.rb
 delete mode 100644 c_glib/test/test-orc-file-reader.rb
 delete mode 100644 c_glib/test/test-read-options.rb
 delete mode 100644 c_glib/test/test-record-batch-builder.rb
 delete mode 100644 c_glib/test/test-record-batch-datum.rb
 delete mode 100644 c_glib/test/test-record-batch-iterator.rb
 delete mode 100644 c_glib/test/test-record-batch.rb
 delete mode 100644 c_glib/test/test-resizable-buffer.rb
 delete mode 100644 c_glib/test/test-schema.rb
 delete mode 100644 c_glib/test/test-slow-file-system.rb
 delete mode 100644 c_glib/test/test-sort-indices.rb
 delete mode 100644 c_glib/test/test-sort-options.rb
 delete mode 100644 c_glib/test/test-sparse-union-array.rb
 delete mode 100644 c_glib/test/test-sparse-union-data-type.rb
 delete mode 100644 c_glib/test/test-stream-writer.rb
 delete mode 100644 c_glib/test/test-string-array.rb
 delete mode 100644 c_glib/test/test-string-data-type.rb
 delete mode 100644 c_glib/test/test-struct-array.rb
 delete mode 100644 c_glib/test/test-struct-data-type.rb
 delete mode 100644 c_glib/test/test-table-batch-reader.rb
 delete mode 100644 c_glib/test/test-table-datum.rb
 delete mode 100644 c_glib/test/test-table.rb
 delete mode 100644 c_glib/test/test-take.rb
 delete mode 100644 c_glib/test/test-tensor.rb
 delete mode 100644 c_glib/test/test-time-data-type.rb
 delete mode 100644 c_glib/test/test-time32-array.rb
 delete mode 100644 c_glib/test/test-time32-data-type.rb
 delete mode 100644 c_glib/test/test-time64-array.rb
 delete mode 100644 c_glib/test/test-time64-data-type.rb
 delete mode 100644 c_glib/test/test-timestamp-array.rb
 delete mode 100644 c_glib/test/test-timestamp-data-type.rb
 delete mode 100644 c_glib/test/test-uint-array-builder.rb
 delete mode 100644 c_glib/test/test-uint16-array.rb
 delete mode 100644 c_glib/test/test-uint16-data-type.rb
 delete mode 100644 c_glib/test/test-uint32-array.rb
 delete mode 100644 c_glib/test/test-uint32-data-type.rb
 delete mode 100644 c_glib/test/test-uint64-array.rb
 delete mode 100644 c_glib/test/test-uint64-data-type.rb
 delete mode 100644 c_glib/test/test-uint8-array.rb
 delete mode 100644 c_glib/test/test-uint8-data-type.rb
 delete mode 100644 c_glib/test/test-unique.rb
 delete mode 100644 c_glib/test/test-write-options.rb
 delete mode 100644 cpp/.gitignore
 delete mode 100644 cpp/Brewfile
 delete mode 100644 cpp/CHANGELOG_PARQUET.md
 delete mode 100644 cpp/CMakeLists.txt
 delete mode 100644 cpp/CMakeSettings.json
 delete mode 100644 cpp/README.md
 delete mode 100644 cpp/apidoc/.gitignore
 delete mode 100644 cpp/apidoc/Doxyfile
 delete mode 100644 cpp/apidoc/HDFS.md
 delete mode 100644 cpp/apidoc/footer.html
 delete mode 100644 cpp/apidoc/tutorials/plasma.md
 delete mode 100644 cpp/apidoc/tutorials/tensor_to_py.md
 delete mode 100755 cpp/build-support/asan_symbolize.py
 delete mode 100755 cpp/build-support/build-lz4-lib.sh
 delete mode 100755 cpp/build-support/build-zstd-lib.sh
 delete mode 100755 cpp/build-support/cpplint.py
 delete mode 100755 cpp/build-support/fuzzing/generate_corpuses.sh
 delete mode 100755 cpp/build-support/fuzzing/pack_corpus.py
 delete mode 100755 cpp/build-support/get-upstream-commit.sh
 delete mode 100644 cpp/build-support/iwyu/iwyu-filter.awk
 delete mode 100755 cpp/build-support/iwyu/iwyu.sh
 delete mode 100755 cpp/build-support/iwyu/iwyu_tool.py
 delete mode 100644 cpp/build-support/iwyu/mappings/arrow-misc.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/boost-all-private.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/boost-all.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/boost-extra.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/gflags.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/glog.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/gmock.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/gtest.imp
 delete mode 100755 cpp/build-support/lint_cpp_cli.py
 delete mode 100644 cpp/build-support/lint_exclusions.txt
 delete mode 100644 cpp/build-support/lintutils.py
 delete mode 100644 cpp/build-support/lsan-suppressions.txt
 delete mode 100755 cpp/build-support/run-infer.sh
 delete mode 100755 cpp/build-support/run-test.sh
 delete mode 100755 cpp/build-support/run_clang_format.py
 delete mode 100755 cpp/build-support/run_clang_tidy.py
 delete mode 100755 cpp/build-support/run_cpplint.py
 delete mode 100644 cpp/build-support/sanitizer-disallowed-entries.txt
 delete mode 100755 cpp/build-support/stacktrace_addr2line.pl
 delete mode 100755 cpp/build-support/trim-boost.sh
 delete mode 100644 cpp/build-support/tsan-suppressions.txt
 delete mode 100644 cpp/build-support/ubsan-suppressions.txt
 delete mode 100755 cpp/build-support/update-flatbuffers.sh
 delete mode 100755 cpp/build-support/update-thrift.sh
 delete mode 100755 cpp/build-support/vendor-flatbuffers.sh
 delete mode 100644 cpp/cmake_modules/BuildUtils.cmake
 delete mode 100644 cpp/cmake_modules/DefineOptions.cmake
 delete mode 100644 cpp/cmake_modules/Find-c-aresAlt.cmake
 delete mode 100644 cpp/cmake_modules/FindArrow.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowCUDA.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowDataset.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowFlight.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowFlightTesting.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowPython.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowPythonFlight.cmake
 delete mode 100644 cpp/cmake_modules/FindArrowTesting.cmake
 delete mode 100644 cpp/cmake_modules/FindBoostAlt.cmake
 delete mode 100644 cpp/cmake_modules/FindBrotli.cmake
 delete mode 100644 cpp/cmake_modules/FindClangTools.cmake
 delete mode 100644 cpp/cmake_modules/FindGLOG.cmake
 delete mode 100644 cpp/cmake_modules/FindGandiva.cmake
 delete mode 100644 cpp/cmake_modules/FindInferTools.cmake
 delete mode 100644 cpp/cmake_modules/FindLLVMAlt.cmake
 delete mode 100644 cpp/cmake_modules/FindLz4.cmake
 delete mode 100644 cpp/cmake_modules/FindNumPy.cmake
 delete mode 100644 cpp/cmake_modules/FindORC.cmake
 delete mode 100644 cpp/cmake_modules/FindOpenSSLAlt.cmake
 delete mode 100644 cpp/cmake_modules/FindParquet.cmake
 delete mode 100644 cpp/cmake_modules/FindPlasma.cmake
 delete mode 100644 cpp/cmake_modules/FindPython3Alt.cmake
 delete mode 100644 cpp/cmake_modules/FindPythonLibsNew.cmake
 delete mode 100644 cpp/cmake_modules/FindRapidJSONAlt.cmake
 delete mode 100644 cpp/cmake_modules/FindSnappy.cmake
 delete mode 100644 cpp/cmake_modules/FindThrift.cmake
 delete mode 100644 cpp/cmake_modules/FindgRPCAlt.cmake
 delete mode 100644 cpp/cmake_modules/FindgflagsAlt.cmake
 delete mode 100644 cpp/cmake_modules/Findjemalloc.cmake
 delete mode 100644 cpp/cmake_modules/Findre2Alt.cmake
 delete mode 100644 cpp/cmake_modules/Findutf8proc.cmake
 delete mode 100644 cpp/cmake_modules/Findzstd.cmake
 delete mode 100644 cpp/cmake_modules/SetupCxxFlags.cmake
 delete mode 100644 cpp/cmake_modules/ThirdpartyToolchain.cmake
 delete mode 100644 cpp/cmake_modules/UseCython.cmake
 delete mode 100644 cpp/cmake_modules/Usevcpkg.cmake
 delete mode 100644 cpp/cmake_modules/san-config.cmake
 delete mode 100644 cpp/examples/arrow/CMakeLists.txt
 delete mode 100644 cpp/examples/arrow/dataset_documentation_example.cc
 delete mode 100644 cpp/examples/arrow/dataset_parquet_scan_example.cc
 delete mode 100644 cpp/examples/arrow/row_wise_conversion_example.cc
 delete mode 100644 cpp/examples/minimal_build/.gitignore
 delete mode 100644 cpp/examples/minimal_build/CMakeLists.txt
 delete mode 100644 cpp/examples/minimal_build/README.md
 delete mode 100755 cpp/examples/minimal_build/build_arrow.sh
 delete mode 100755 cpp/examples/minimal_build/build_example.sh
 delete mode 100644 cpp/examples/minimal_build/docker-compose.yml
 delete mode 100644 cpp/examples/minimal_build/example.cc
 delete mode 100644 cpp/examples/minimal_build/minimal.dockerfile
 delete mode 100755 cpp/examples/minimal_build/run.sh
 delete mode 100644 cpp/examples/minimal_build/run_static.bat
 delete mode 100755 cpp/examples/minimal_build/run_static.sh
 delete mode 100644 cpp/examples/minimal_build/system_dependency.dockerfile
 delete mode 100644 cpp/examples/minimal_build/test.csv
 delete mode 100644 cpp/examples/parquet/CMakeLists.txt
 delete mode 100644 cpp/examples/parquet/low_level_api/encryption_reader_writer.cc
 delete mode 100644 cpp/examples/parquet/low_level_api/encryption_reader_writer_all_crypto_options.cc
 delete mode 100644 cpp/examples/parquet/low_level_api/reader_writer.cc
 delete mode 100644 cpp/examples/parquet/low_level_api/reader_writer.h
 delete mode 100644 cpp/examples/parquet/low_level_api/reader_writer2.cc
 delete mode 100644 cpp/examples/parquet/parquet_arrow/CMakeLists.txt
 delete mode 100644 cpp/examples/parquet/parquet_arrow/README.md
 delete mode 100644 cpp/examples/parquet/parquet_arrow/reader_writer.cc
 delete mode 100644 cpp/examples/parquet/parquet_stream_api/stream_reader_writer.cc
 delete mode 100644 cpp/src/arrow/ArrowConfig.cmake.in
 delete mode 100644 cpp/src/arrow/ArrowTestingConfig.cmake.in
 delete mode 100644 cpp/src/arrow/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/adapters/orc/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/adapters/orc/adapter.cc
 delete mode 100644 cpp/src/arrow/adapters/orc/adapter.h
 delete mode 100644 cpp/src/arrow/adapters/orc/adapter_test.cc
 delete mode 100644 cpp/src/arrow/adapters/orc/adapter_util.cc
 delete mode 100644 cpp/src/arrow/adapters/orc/adapter_util.h
 delete mode 100644 cpp/src/arrow/adapters/orc/arrow-orc.pc.in
 delete mode 100644 cpp/src/arrow/adapters/tensorflow/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/adapters/tensorflow/arrow-tensorflow.pc.in
 delete mode 100644 cpp/src/arrow/adapters/tensorflow/convert.h
 delete mode 100644 cpp/src/arrow/api.h
 delete mode 100644 cpp/src/arrow/array.h
 delete mode 100644 cpp/src/arrow/array/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/array/README.md
 delete mode 100644 cpp/src/arrow/array/array_base.cc
 delete mode 100644 cpp/src/arrow/array/array_base.h
 delete mode 100644 cpp/src/arrow/array/array_binary.cc
 delete mode 100644 cpp/src/arrow/array/array_binary.h
 delete mode 100644 cpp/src/arrow/array/array_binary_test.cc
 delete mode 100644 cpp/src/arrow/array/array_decimal.cc
 delete mode 100644 cpp/src/arrow/array/array_decimal.h
 delete mode 100644 cpp/src/arrow/array/array_dict.cc
 delete mode 100644 cpp/src/arrow/array/array_dict.h
 delete mode 100644 cpp/src/arrow/array/array_dict_test.cc
 delete mode 100644 cpp/src/arrow/array/array_list_test.cc
 delete mode 100644 cpp/src/arrow/array/array_nested.cc
 delete mode 100644 cpp/src/arrow/array/array_nested.h
 delete mode 100644 cpp/src/arrow/array/array_primitive.cc
 delete mode 100644 cpp/src/arrow/array/array_primitive.h
 delete mode 100644 cpp/src/arrow/array/array_struct_test.cc
 delete mode 100644 cpp/src/arrow/array/array_test.cc
 delete mode 100644 cpp/src/arrow/array/array_union_test.cc
 delete mode 100644 cpp/src/arrow/array/array_view_test.cc
 delete mode 100644 cpp/src/arrow/array/builder_adaptive.cc
 delete mode 100644 cpp/src/arrow/array/builder_adaptive.h
 delete mode 100644 cpp/src/arrow/array/builder_base.cc
 delete mode 100644 cpp/src/arrow/array/builder_base.h
 delete mode 100644 cpp/src/arrow/array/builder_binary.cc
 delete mode 100644 cpp/src/arrow/array/builder_binary.h
 delete mode 100644 cpp/src/arrow/array/builder_decimal.cc
 delete mode 100644 cpp/src/arrow/array/builder_decimal.h
 delete mode 100644 cpp/src/arrow/array/builder_dict.cc
 delete mode 100644 cpp/src/arrow/array/builder_dict.h
 delete mode 100644 cpp/src/arrow/array/builder_nested.cc
 delete mode 100644 cpp/src/arrow/array/builder_nested.h
 delete mode 100644 cpp/src/arrow/array/builder_primitive.cc
 delete mode 100644 cpp/src/arrow/array/builder_primitive.h
 delete mode 100644 cpp/src/arrow/array/builder_time.h
 delete mode 100644 cpp/src/arrow/array/builder_union.cc
 delete mode 100644 cpp/src/arrow/array/builder_union.h
 delete mode 100644 cpp/src/arrow/array/concatenate.cc
 delete mode 100644 cpp/src/arrow/array/concatenate.h
 delete mode 100644 cpp/src/arrow/array/concatenate_test.cc
 delete mode 100644 cpp/src/arrow/array/data.cc
 delete mode 100644 cpp/src/arrow/array/data.h
 delete mode 100644 cpp/src/arrow/array/dict_internal.h
 delete mode 100644 cpp/src/arrow/array/diff.cc
 delete mode 100644 cpp/src/arrow/array/diff.h
 delete mode 100644 cpp/src/arrow/array/diff_test.cc
 delete mode 100644 cpp/src/arrow/array/util.cc
 delete mode 100644 cpp/src/arrow/array/util.h
 delete mode 100644 cpp/src/arrow/array/validate.cc
 delete mode 100644 cpp/src/arrow/array/validate.h
 delete mode 100644 cpp/src/arrow/arrow-config.cmake
 delete mode 100644 cpp/src/arrow/arrow-testing.pc.in
 delete mode 100644 cpp/src/arrow/arrow.pc.in
 delete mode 100644 cpp/src/arrow/buffer.cc
 delete mode 100644 cpp/src/arrow/buffer.h
 delete mode 100644 cpp/src/arrow/buffer_builder.h
 delete mode 100644 cpp/src/arrow/buffer_test.cc
 delete mode 100644 cpp/src/arrow/builder.cc
 delete mode 100644 cpp/src/arrow/builder.h
 delete mode 100644 cpp/src/arrow/builder_benchmark.cc
 delete mode 100644 cpp/src/arrow/c/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/c/abi.h
 delete mode 100644 cpp/src/arrow/c/bridge.cc
 delete mode 100644 cpp/src/arrow/c/bridge.h
 delete mode 100644 cpp/src/arrow/c/bridge_benchmark.cc
 delete mode 100644 cpp/src/arrow/c/bridge_test.cc
 delete mode 100644 cpp/src/arrow/c/helpers.h
 delete mode 100644 cpp/src/arrow/c/util_internal.h
 delete mode 100644 cpp/src/arrow/chunked_array.cc
 delete mode 100644 cpp/src/arrow/chunked_array.h
 delete mode 100644 cpp/src/arrow/chunked_array_test.cc
 delete mode 100644 cpp/src/arrow/compare.cc
 delete mode 100644 cpp/src/arrow/compare.h
 delete mode 100644 cpp/src/arrow/compare_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/compute/README.md
 delete mode 100644 cpp/src/arrow/compute/api.h
 delete mode 100644 cpp/src/arrow/compute/api_aggregate.cc
 delete mode 100644 cpp/src/arrow/compute/api_aggregate.h
 delete mode 100644 cpp/src/arrow/compute/api_scalar.cc
 delete mode 100644 cpp/src/arrow/compute/api_scalar.h
 delete mode 100644 cpp/src/arrow/compute/api_vector.cc
 delete mode 100644 cpp/src/arrow/compute/api_vector.h
 delete mode 100644 cpp/src/arrow/compute/arrow-compute.pc.in
 delete mode 100644 cpp/src/arrow/compute/cast.cc
 delete mode 100644 cpp/src/arrow/compute/cast.h
 delete mode 100644 cpp/src/arrow/compute/cast_internal.h
 delete mode 100644 cpp/src/arrow/compute/exec.cc
 delete mode 100644 cpp/src/arrow/compute/exec.h
 delete mode 100644 cpp/src/arrow/compute/exec_internal.h
 delete mode 100644 cpp/src/arrow/compute/exec_test.cc
 delete mode 100644 cpp/src/arrow/compute/function.cc
 delete mode 100644 cpp/src/arrow/compute/function.h
 delete mode 100644 cpp/src/arrow/compute/function_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/function_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernel.cc
 delete mode 100644 cpp/src/arrow/compute/kernel.h
 delete mode 100644 cpp/src/arrow/compute/kernel_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_basic.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_basic_avx2.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_basic_avx512.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_basic_internal.h
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_internal.h
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_mode.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_quantile.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_tdigest.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/aggregate_var_std.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/codegen_internal.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/codegen_internal.h
 delete mode 100644 cpp/src/arrow/compute/kernels/common.h
 delete mode 100644 cpp/src/arrow/compute/kernels/hash_aggregate.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/hash_aggregate_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_arithmetic.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_arithmetic_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_arithmetic_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_boolean.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_boolean_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_boolean_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_boolean.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_internal.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_internal.h
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_nested.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_numeric.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_string.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_temporal.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_cast_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_compare.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_compare_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_compare_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_fill_null.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_fill_null_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_nested.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_nested_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_set_lookup.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_set_lookup_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_set_lookup_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_string.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_string_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_string_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_validity.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/scalar_validity_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/test_util.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/test_util.h
 delete mode 100644 cpp/src/arrow/compute/kernels/util_internal.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/util_internal.h
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_hash.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_hash_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_hash_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_nested.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_nested_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_partition_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_selection.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_selection_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_selection_test.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_sort.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_sort_benchmark.cc
 delete mode 100644 cpp/src/arrow/compute/kernels/vector_sort_test.cc
 delete mode 100644 cpp/src/arrow/compute/registry.cc
 delete mode 100644 cpp/src/arrow/compute/registry.h
 delete mode 100644 cpp/src/arrow/compute/registry_internal.h
 delete mode 100644 cpp/src/arrow/compute/registry_test.cc
 delete mode 100644 cpp/src/arrow/compute/type_fwd.h
 delete mode 100644 cpp/src/arrow/compute/util_internal.h
 delete mode 100644 cpp/src/arrow/config.cc
 delete mode 100644 cpp/src/arrow/config.h
 delete mode 100644 cpp/src/arrow/csv/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/csv/api.h
 delete mode 100644 cpp/src/arrow/csv/arrow-csv.pc.in
 delete mode 100644 cpp/src/arrow/csv/chunker.cc
 delete mode 100644 cpp/src/arrow/csv/chunker.h
 delete mode 100644 cpp/src/arrow/csv/chunker_test.cc
 delete mode 100644 cpp/src/arrow/csv/column_builder.cc
 delete mode 100644 cpp/src/arrow/csv/column_builder.h
 delete mode 100644 cpp/src/arrow/csv/column_builder_test.cc
 delete mode 100644 cpp/src/arrow/csv/column_decoder.cc
 delete mode 100644 cpp/src/arrow/csv/column_decoder.h
 delete mode 100644 cpp/src/arrow/csv/column_decoder_test.cc
 delete mode 100644 cpp/src/arrow/csv/converter.cc
 delete mode 100644 cpp/src/arrow/csv/converter.h
 delete mode 100644 cpp/src/arrow/csv/converter_benchmark.cc
 delete mode 100644 cpp/src/arrow/csv/converter_test.cc
 delete mode 100644 cpp/src/arrow/csv/inference_internal.h
 delete mode 100644 cpp/src/arrow/csv/options.cc
 delete mode 100644 cpp/src/arrow/csv/options.h
 delete mode 100644 cpp/src/arrow/csv/parser.cc
 delete mode 100644 cpp/src/arrow/csv/parser.h
 delete mode 100644 cpp/src/arrow/csv/parser_benchmark.cc
 delete mode 100644 cpp/src/arrow/csv/parser_test.cc
 delete mode 100644 cpp/src/arrow/csv/reader.cc
 delete mode 100644 cpp/src/arrow/csv/reader.h
 delete mode 100644 cpp/src/arrow/csv/reader_test.cc
 delete mode 100644 cpp/src/arrow/csv/test_common.cc
 delete mode 100644 cpp/src/arrow/csv/test_common.h
 delete mode 100644 cpp/src/arrow/csv/type_fwd.h
 delete mode 100644 cpp/src/arrow/csv/writer.cc
 delete mode 100644 cpp/src/arrow/csv/writer.h
 delete mode 100644 cpp/src/arrow/csv/writer_test.cc
 delete mode 100644 cpp/src/arrow/dataset/ArrowDatasetConfig.cmake.in
 delete mode 100644 cpp/src/arrow/dataset/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/dataset/README.md
 delete mode 100644 cpp/src/arrow/dataset/api.h
 delete mode 100644 cpp/src/arrow/dataset/arrow-dataset.pc.in
 delete mode 100644 cpp/src/arrow/dataset/dataset.cc
 delete mode 100644 cpp/src/arrow/dataset/dataset.h
 delete mode 100644 cpp/src/arrow/dataset/dataset_internal.h
 delete mode 100644 cpp/src/arrow/dataset/dataset_test.cc
 delete mode 100644 cpp/src/arrow/dataset/discovery.cc
 delete mode 100644 cpp/src/arrow/dataset/discovery.h
 delete mode 100644 cpp/src/arrow/dataset/discovery_test.cc
 delete mode 100644 cpp/src/arrow/dataset/expression.cc
 delete mode 100644 cpp/src/arrow/dataset/expression.h
 delete mode 100644 cpp/src/arrow/dataset/expression_benchmark.cc
 delete mode 100644 cpp/src/arrow/dataset/expression_internal.h
 delete mode 100644 cpp/src/arrow/dataset/expression_test.cc
 delete mode 100644 cpp/src/arrow/dataset/file_base.cc
 delete mode 100644 cpp/src/arrow/dataset/file_base.h
 delete mode 100644 cpp/src/arrow/dataset/file_benchmark.cc
 delete mode 100644 cpp/src/arrow/dataset/file_csv.cc
 delete mode 100644 cpp/src/arrow/dataset/file_csv.h
 delete mode 100644 cpp/src/arrow/dataset/file_csv_test.cc
 delete mode 100644 cpp/src/arrow/dataset/file_ipc.cc
 delete mode 100644 cpp/src/arrow/dataset/file_ipc.h
 delete mode 100644 cpp/src/arrow/dataset/file_ipc_test.cc
 delete mode 100644 cpp/src/arrow/dataset/file_parquet.cc
 delete mode 100644 cpp/src/arrow/dataset/file_parquet.h
 delete mode 100644 cpp/src/arrow/dataset/file_parquet_test.cc
 delete mode 100644 cpp/src/arrow/dataset/file_test.cc
 delete mode 100644 cpp/src/arrow/dataset/forest_internal.h
 delete mode 100644 cpp/src/arrow/dataset/partition.cc
 delete mode 100644 cpp/src/arrow/dataset/partition.h
 delete mode 100644 cpp/src/arrow/dataset/partition_test.cc
 delete mode 100644 cpp/src/arrow/dataset/pch.h
 delete mode 100644 cpp/src/arrow/dataset/projector.cc
 delete mode 100644 cpp/src/arrow/dataset/projector.h
 delete mode 100644 cpp/src/arrow/dataset/scanner.cc
 delete mode 100644 cpp/src/arrow/dataset/scanner.h
 delete mode 100644 cpp/src/arrow/dataset/scanner_internal.h
 delete mode 100644 cpp/src/arrow/dataset/scanner_test.cc
 delete mode 100644 cpp/src/arrow/dataset/test_util.h
 delete mode 100644 cpp/src/arrow/dataset/type_fwd.h
 delete mode 100644 cpp/src/arrow/dataset/visibility.h
 delete mode 100644 cpp/src/arrow/datum.cc
 delete mode 100644 cpp/src/arrow/datum.h
 delete mode 100644 cpp/src/arrow/datum_test.cc
 delete mode 100644 cpp/src/arrow/dbi/README.md
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/api.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/columnar_row_set.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/columnar_row_set.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/hiveserver2_test.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/operation.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/operation.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/public_api_test.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/sample_usage.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/service.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/service.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/session.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/session.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/.gitignore
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/ExecStats.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/ImpalaService.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/Status.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/TCLIService.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/Types.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/beeswax.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/fb303.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/generate_error_codes.py
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift/hive_metastore.thrift
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift_internal.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/thrift_internal.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/types.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/types.h
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/util.cc
 delete mode 100644 cpp/src/arrow/dbi/hiveserver2/util.h
 delete mode 100644 cpp/src/arrow/device.cc
 delete mode 100644 cpp/src/arrow/device.h
 delete mode 100644 cpp/src/arrow/extension_type.cc
 delete mode 100644 cpp/src/arrow/extension_type.h
 delete mode 100644 cpp/src/arrow/extension_type_test.cc
 delete mode 100644 cpp/src/arrow/filesystem/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/filesystem/api.h
 delete mode 100644 cpp/src/arrow/filesystem/arrow-filesystem.pc.in
 delete mode 100644 cpp/src/arrow/filesystem/filesystem.cc
 delete mode 100644 cpp/src/arrow/filesystem/filesystem.h
 delete mode 100644 cpp/src/arrow/filesystem/filesystem_test.cc
 delete mode 100644 cpp/src/arrow/filesystem/hdfs.cc
 delete mode 100644 cpp/src/arrow/filesystem/hdfs.h
 delete mode 100644 cpp/src/arrow/filesystem/hdfs_test.cc
 delete mode 100644 cpp/src/arrow/filesystem/localfs.cc
 delete mode 100644 cpp/src/arrow/filesystem/localfs.h
 delete mode 100644 cpp/src/arrow/filesystem/localfs_test.cc
 delete mode 100644 cpp/src/arrow/filesystem/mockfs.cc
 delete mode 100644 cpp/src/arrow/filesystem/mockfs.h
 delete mode 100644 cpp/src/arrow/filesystem/path_util.cc
 delete mode 100644 cpp/src/arrow/filesystem/path_util.h
 delete mode 100644 cpp/src/arrow/filesystem/s3_internal.h
 delete mode 100644 cpp/src/arrow/filesystem/s3_test_util.h
 delete mode 100644 cpp/src/arrow/filesystem/s3fs.cc
 delete mode 100644 cpp/src/arrow/filesystem/s3fs.h
 delete mode 100644 cpp/src/arrow/filesystem/s3fs_benchmark.cc
 delete mode 100644 cpp/src/arrow/filesystem/s3fs_narrative_test.cc
 delete mode 100644 cpp/src/arrow/filesystem/s3fs_test.cc
 delete mode 100644 cpp/src/arrow/filesystem/test_util.cc
 delete mode 100644 cpp/src/arrow/filesystem/test_util.h
 delete mode 100644 cpp/src/arrow/filesystem/type_fwd.h
 delete mode 100644 cpp/src/arrow/filesystem/util_internal.cc
 delete mode 100644 cpp/src/arrow/filesystem/util_internal.h
 delete mode 100644 cpp/src/arrow/flight/ArrowFlightConfig.cmake.in
 delete mode 100644 cpp/src/arrow/flight/ArrowFlightTestingConfig.cmake.in
 delete mode 100644 cpp/src/arrow/flight/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/flight/README.md
 delete mode 100644 cpp/src/arrow/flight/api.h
 delete mode 100644 cpp/src/arrow/flight/arrow-flight-testing.pc.in
 delete mode 100644 cpp/src/arrow/flight/arrow-flight.pc.in
 delete mode 100644 cpp/src/arrow/flight/client.cc
 delete mode 100644 cpp/src/arrow/flight/client.h
 delete mode 100644 cpp/src/arrow/flight/client_auth.h
 delete mode 100644 cpp/src/arrow/flight/client_cookie_middleware.cc
 delete mode 100644 cpp/src/arrow/flight/client_cookie_middleware.h
 delete mode 100644 cpp/src/arrow/flight/client_header_internal.cc
 delete mode 100644 cpp/src/arrow/flight/client_header_internal.h
 delete mode 100644 cpp/src/arrow/flight/client_middleware.h
 delete mode 100644 cpp/src/arrow/flight/customize_protobuf.h
 delete mode 100644 cpp/src/arrow/flight/flight_benchmark.cc
 delete mode 100644 cpp/src/arrow/flight/flight_test.cc
 delete mode 100644 cpp/src/arrow/flight/internal.cc
 delete mode 100644 cpp/src/arrow/flight/internal.h
 delete mode 100644 cpp/src/arrow/flight/middleware.h
 delete mode 100644 cpp/src/arrow/flight/middleware_internal.h
 delete mode 100644 cpp/src/arrow/flight/pch.h
 delete mode 100644 cpp/src/arrow/flight/perf.proto
 delete mode 100644 cpp/src/arrow/flight/perf_server.cc
 delete mode 100644 cpp/src/arrow/flight/platform.h
 delete mode 100644 cpp/src/arrow/flight/protocol_internal.cc
 delete mode 100644 cpp/src/arrow/flight/protocol_internal.h
 delete mode 100644 cpp/src/arrow/flight/serialization_internal.cc
 delete mode 100644 cpp/src/arrow/flight/serialization_internal.h
 delete mode 100644 cpp/src/arrow/flight/server.cc
 delete mode 100644 cpp/src/arrow/flight/server.h
 delete mode 100644 cpp/src/arrow/flight/server_auth.cc
 delete mode 100644 cpp/src/arrow/flight/server_auth.h
 delete mode 100644 cpp/src/arrow/flight/server_middleware.h
 delete mode 100644 cpp/src/arrow/flight/test_integration.cc
 delete mode 100644 cpp/src/arrow/flight/test_integration.h
 delete mode 100644 cpp/src/arrow/flight/test_integration_client.cc
 delete mode 100644 cpp/src/arrow/flight/test_integration_server.cc
 delete mode 100644 cpp/src/arrow/flight/test_server.cc
 delete mode 100644 cpp/src/arrow/flight/test_util.cc
 delete mode 100644 cpp/src/arrow/flight/test_util.h
 delete mode 100644 cpp/src/arrow/flight/try_compile/check_tls_opts_127.cc
 delete mode 100644 cpp/src/arrow/flight/try_compile/check_tls_opts_132.cc
 delete mode 100644 cpp/src/arrow/flight/try_compile/check_tls_opts_134.cc
 delete mode 100644 cpp/src/arrow/flight/try_compile/check_tls_opts_136.cc
 delete mode 100644 cpp/src/arrow/flight/types.cc
 delete mode 100644 cpp/src/arrow/flight/types.h
 delete mode 100644 cpp/src/arrow/flight/visibility.h
 delete mode 100644 cpp/src/arrow/gpu/.gitignore
 delete mode 100644 cpp/src/arrow/gpu/ArrowCUDAConfig.cmake.in
 delete mode 100644 cpp/src/arrow/gpu/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/gpu/arrow-cuda.pc.in
 delete mode 100644 cpp/src/arrow/gpu/cuda_api.h
 delete mode 100644 cpp/src/arrow/gpu/cuda_arrow_ipc.cc
 delete mode 100644 cpp/src/arrow/gpu/cuda_arrow_ipc.h
 delete mode 100644 cpp/src/arrow/gpu/cuda_benchmark.cc
 delete mode 100644 cpp/src/arrow/gpu/cuda_context.cc
 delete mode 100644 cpp/src/arrow/gpu/cuda_context.h
 delete mode 100644 cpp/src/arrow/gpu/cuda_internal.cc
 delete mode 100644 cpp/src/arrow/gpu/cuda_internal.h
 delete mode 100644 cpp/src/arrow/gpu/cuda_memory.cc
 delete mode 100644 cpp/src/arrow/gpu/cuda_memory.h
 delete mode 100644 cpp/src/arrow/gpu/cuda_test.cc
 delete mode 100644 cpp/src/arrow/gpu/cuda_version.h.in
 delete mode 100644 cpp/src/arrow/io/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/io/api.h
 delete mode 100644 cpp/src/arrow/io/buffered.cc
 delete mode 100644 cpp/src/arrow/io/buffered.h
 delete mode 100644 cpp/src/arrow/io/buffered_test.cc
 delete mode 100644 cpp/src/arrow/io/caching.cc
 delete mode 100644 cpp/src/arrow/io/caching.h
 delete mode 100644 cpp/src/arrow/io/compressed.cc
 delete mode 100644 cpp/src/arrow/io/compressed.h
 delete mode 100644 cpp/src/arrow/io/compressed_test.cc
 delete mode 100644 cpp/src/arrow/io/concurrency.h
 delete mode 100644 cpp/src/arrow/io/file.cc
 delete mode 100644 cpp/src/arrow/io/file.h
 delete mode 100644 cpp/src/arrow/io/file_benchmark.cc
 delete mode 100644 cpp/src/arrow/io/file_test.cc
 delete mode 100644 cpp/src/arrow/io/hdfs.cc
 delete mode 100644 cpp/src/arrow/io/hdfs.h
 delete mode 100644 cpp/src/arrow/io/hdfs_internal.cc
 delete mode 100644 cpp/src/arrow/io/hdfs_internal.h
 delete mode 100644 cpp/src/arrow/io/hdfs_test.cc
 delete mode 100644 cpp/src/arrow/io/interfaces.cc
 delete mode 100644 cpp/src/arrow/io/interfaces.h
 delete mode 100644 cpp/src/arrow/io/memory.cc
 delete mode 100644 cpp/src/arrow/io/memory.h
 delete mode 100644 cpp/src/arrow/io/memory_benchmark.cc
 delete mode 100644 cpp/src/arrow/io/memory_test.cc
 delete mode 100644 cpp/src/arrow/io/mman.h
 delete mode 100644 cpp/src/arrow/io/slow.cc
 delete mode 100644 cpp/src/arrow/io/slow.h
 delete mode 100644 cpp/src/arrow/io/test_common.cc
 delete mode 100644 cpp/src/arrow/io/test_common.h
 delete mode 100644 cpp/src/arrow/io/transform.cc
 delete mode 100644 cpp/src/arrow/io/transform.h
 delete mode 100644 cpp/src/arrow/io/type_fwd.h
 delete mode 100644 cpp/src/arrow/io/util_internal.h
 delete mode 100644 cpp/src/arrow/ipc/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/ipc/api.h
 delete mode 100644 cpp/src/arrow/ipc/dictionary.cc
 delete mode 100644 cpp/src/arrow/ipc/dictionary.h
 delete mode 100644 cpp/src/arrow/ipc/feather.cc
 delete mode 100644 cpp/src/arrow/ipc/feather.fbs
 delete mode 100644 cpp/src/arrow/ipc/feather.h
 delete mode 100644 cpp/src/arrow/ipc/feather_test.cc
 delete mode 100644 cpp/src/arrow/ipc/file_fuzz.cc
 delete mode 100644 cpp/src/arrow/ipc/file_to_stream.cc
 delete mode 100644 cpp/src/arrow/ipc/generate_fuzz_corpus.cc
 delete mode 100644 cpp/src/arrow/ipc/generate_tensor_fuzz_corpus.cc
 delete mode 100644 cpp/src/arrow/ipc/json_simple.cc
 delete mode 100644 cpp/src/arrow/ipc/json_simple.h
 delete mode 100644 cpp/src/arrow/ipc/json_simple_test.cc
 delete mode 100644 cpp/src/arrow/ipc/message.cc
 delete mode 100644 cpp/src/arrow/ipc/message.h
 delete mode 100644 cpp/src/arrow/ipc/metadata_internal.cc
 delete mode 100644 cpp/src/arrow/ipc/metadata_internal.h
 delete mode 100644 cpp/src/arrow/ipc/options.cc
 delete mode 100644 cpp/src/arrow/ipc/options.h
 delete mode 100644 cpp/src/arrow/ipc/read_write_benchmark.cc
 delete mode 100644 cpp/src/arrow/ipc/read_write_test.cc
 delete mode 100644 cpp/src/arrow/ipc/reader.cc
 delete mode 100644 cpp/src/arrow/ipc/reader.h
 delete mode 100644 cpp/src/arrow/ipc/stream_fuzz.cc
 delete mode 100644 cpp/src/arrow/ipc/stream_to_file.cc
 delete mode 100644 cpp/src/arrow/ipc/tensor_stream_fuzz.cc
 delete mode 100644 cpp/src/arrow/ipc/tensor_test.cc
 delete mode 100644 cpp/src/arrow/ipc/test_common.cc
 delete mode 100644 cpp/src/arrow/ipc/test_common.h
 delete mode 100644 cpp/src/arrow/ipc/type_fwd.h
 delete mode 100644 cpp/src/arrow/ipc/util.h
 delete mode 100644 cpp/src/arrow/ipc/writer.cc
 delete mode 100644 cpp/src/arrow/ipc/writer.h
 delete mode 100644 cpp/src/arrow/json/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/json/api.h
 delete mode 100644 cpp/src/arrow/json/arrow-json.pc.in
 delete mode 100644 cpp/src/arrow/json/chunked_builder.cc
 delete mode 100644 cpp/src/arrow/json/chunked_builder.h
 delete mode 100644 cpp/src/arrow/json/chunked_builder_test.cc
 delete mode 100644 cpp/src/arrow/json/chunker.cc
 delete mode 100644 cpp/src/arrow/json/chunker.h
 delete mode 100644 cpp/src/arrow/json/chunker_test.cc
 delete mode 100644 cpp/src/arrow/json/converter.cc
 delete mode 100644 cpp/src/arrow/json/converter.h
 delete mode 100644 cpp/src/arrow/json/converter_test.cc
 delete mode 100644 cpp/src/arrow/json/object_parser.cc
 delete mode 100644 cpp/src/arrow/json/object_parser.h
 delete mode 100644 cpp/src/arrow/json/object_writer.cc
 delete mode 100644 cpp/src/arrow/json/object_writer.h
 delete mode 100644 cpp/src/arrow/json/options.cc
 delete mode 100644 cpp/src/arrow/json/options.h
 delete mode 100644 cpp/src/arrow/json/parser.cc
 delete mode 100644 cpp/src/arrow/json/parser.h
 delete mode 100644 cpp/src/arrow/json/parser_benchmark.cc
 delete mode 100644 cpp/src/arrow/json/parser_test.cc
 delete mode 100644 cpp/src/arrow/json/rapidjson_defs.h
 delete mode 100644 cpp/src/arrow/json/reader.cc
 delete mode 100644 cpp/src/arrow/json/reader.h
 delete mode 100644 cpp/src/arrow/json/reader_test.cc
 delete mode 100644 cpp/src/arrow/json/test_common.h
 delete mode 100644 cpp/src/arrow/json/type_fwd.h
 delete mode 100644 cpp/src/arrow/memory_pool.cc
 delete mode 100644 cpp/src/arrow/memory_pool.h
 delete mode 100644 cpp/src/arrow/memory_pool_benchmark.cc
 delete mode 100644 cpp/src/arrow/memory_pool_test.cc
 delete mode 100644 cpp/src/arrow/memory_pool_test.h
 delete mode 100644 cpp/src/arrow/pch.h
 delete mode 100644 cpp/src/arrow/pretty_print.cc
 delete mode 100644 cpp/src/arrow/pretty_print.h
 delete mode 100644 cpp/src/arrow/pretty_print_test.cc
 delete mode 100644 cpp/src/arrow/public_api_test.cc
 delete mode 100644 cpp/src/arrow/python/ArrowPythonConfig.cmake.in
 delete mode 100644 cpp/src/arrow/python/ArrowPythonFlightConfig.cmake.in
 delete mode 100644 cpp/src/arrow/python/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/python/api.h
 delete mode 100644 cpp/src/arrow/python/arrow-python-flight.pc.in
 delete mode 100644 cpp/src/arrow/python/arrow-python.pc.in
 delete mode 100644 cpp/src/arrow/python/arrow_to_pandas.cc
 delete mode 100644 cpp/src/arrow/python/arrow_to_pandas.h
 delete mode 100644 cpp/src/arrow/python/benchmark.cc
 delete mode 100644 cpp/src/arrow/python/benchmark.h
 delete mode 100644 cpp/src/arrow/python/common.cc
 delete mode 100644 cpp/src/arrow/python/common.h
 delete mode 100644 cpp/src/arrow/python/datetime.cc
 delete mode 100644 cpp/src/arrow/python/datetime.h
 delete mode 100644 cpp/src/arrow/python/decimal.cc
 delete mode 100644 cpp/src/arrow/python/decimal.h
 delete mode 100644 cpp/src/arrow/python/deserialize.cc
 delete mode 100644 cpp/src/arrow/python/deserialize.h
 delete mode 100644 cpp/src/arrow/python/extension_type.cc
 delete mode 100644 cpp/src/arrow/python/extension_type.h
 delete mode 100644 cpp/src/arrow/python/filesystem.cc
 delete mode 100644 cpp/src/arrow/python/filesystem.h
 delete mode 100644 cpp/src/arrow/python/flight.cc
 delete mode 100644 cpp/src/arrow/python/flight.h
 delete mode 100644 cpp/src/arrow/python/helpers.cc
 delete mode 100644 cpp/src/arrow/python/helpers.h
 delete mode 100644 cpp/src/arrow/python/inference.cc
 delete mode 100644 cpp/src/arrow/python/inference.h
 delete mode 100644 cpp/src/arrow/python/init.cc
 delete mode 100644 cpp/src/arrow/python/init.h
 delete mode 100644 cpp/src/arrow/python/io.cc
 delete mode 100644 cpp/src/arrow/python/io.h
 delete mode 100644 cpp/src/arrow/python/ipc.cc
 delete mode 100644 cpp/src/arrow/python/ipc.h
 delete mode 100644 cpp/src/arrow/python/iterators.h
 delete mode 100644 cpp/src/arrow/python/numpy_convert.cc
 delete mode 100644 cpp/src/arrow/python/numpy_convert.h
 delete mode 100644 cpp/src/arrow/python/numpy_internal.h
 delete mode 100644 cpp/src/arrow/python/numpy_interop.h
 delete mode 100644 cpp/src/arrow/python/numpy_to_arrow.cc
 delete mode 100644 cpp/src/arrow/python/numpy_to_arrow.h
 delete mode 100644 cpp/src/arrow/python/pch.h
 delete mode 100644 cpp/src/arrow/python/platform.h
 delete mode 100644 cpp/src/arrow/python/pyarrow.cc
 delete mode 100644 cpp/src/arrow/python/pyarrow.h
 delete mode 100644 cpp/src/arrow/python/pyarrow_api.h
 delete mode 100644 cpp/src/arrow/python/pyarrow_lib.h
 delete mode 100644 cpp/src/arrow/python/python_test.cc
 delete mode 100644 cpp/src/arrow/python/python_to_arrow.cc
 delete mode 100644 cpp/src/arrow/python/python_to_arrow.h
 delete mode 100644 cpp/src/arrow/python/serialize.cc
 delete mode 100644 cpp/src/arrow/python/serialize.h
 delete mode 100644 cpp/src/arrow/python/type_traits.h
 delete mode 100644 cpp/src/arrow/python/util/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/python/util/test_main.cc
 delete mode 100644 cpp/src/arrow/python/visibility.h
 delete mode 100644 cpp/src/arrow/record_batch.cc
 delete mode 100644 cpp/src/arrow/record_batch.h
 delete mode 100644 cpp/src/arrow/record_batch_test.cc
 delete mode 100644 cpp/src/arrow/result.cc
 delete mode 100644 cpp/src/arrow/result.h
 delete mode 100644 cpp/src/arrow/result_internal.h
 delete mode 100644 cpp/src/arrow/result_test.cc
 delete mode 100644 cpp/src/arrow/scalar.cc
 delete mode 100644 cpp/src/arrow/scalar.h
 delete mode 100644 cpp/src/arrow/scalar_test.cc
 delete mode 100644 cpp/src/arrow/sparse_tensor.cc
 delete mode 100644 cpp/src/arrow/sparse_tensor.h
 delete mode 100644 cpp/src/arrow/sparse_tensor_test.cc
 delete mode 100644 cpp/src/arrow/status.cc
 delete mode 100644 cpp/src/arrow/status.h
 delete mode 100644 cpp/src/arrow/status_test.cc
 delete mode 100644 cpp/src/arrow/stl.h
 delete mode 100644 cpp/src/arrow/stl_allocator.h
 delete mode 100644 cpp/src/arrow/stl_iterator.h
 delete mode 100644 cpp/src/arrow/stl_iterator_test.cc
 delete mode 100644 cpp/src/arrow/stl_test.cc
 delete mode 100644 cpp/src/arrow/symbols.map
 delete mode 100644 cpp/src/arrow/table.cc
 delete mode 100644 cpp/src/arrow/table.h
 delete mode 100644 cpp/src/arrow/table_builder.cc
 delete mode 100644 cpp/src/arrow/table_builder.h
 delete mode 100644 cpp/src/arrow/table_builder_test.cc
 delete mode 100644 cpp/src/arrow/table_test.cc
 delete mode 100644 cpp/src/arrow/tensor.cc
 delete mode 100644 cpp/src/arrow/tensor.h
 delete mode 100644 cpp/src/arrow/tensor/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/tensor/converter.h
 delete mode 100644 cpp/src/arrow/tensor/converter_internal.h
 delete mode 100644 cpp/src/arrow/tensor/coo_converter.cc
 delete mode 100644 cpp/src/arrow/tensor/csf_converter.cc
 delete mode 100644 cpp/src/arrow/tensor/csx_converter.cc
 delete mode 100644 cpp/src/arrow/tensor/tensor_conversion_benchmark.cc
 delete mode 100644 cpp/src/arrow/tensor_test.cc
 delete mode 100644 cpp/src/arrow/testing/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/testing/extension_type.h
 delete mode 100644 cpp/src/arrow/testing/future_util.h
 delete mode 100644 cpp/src/arrow/testing/generator.cc
 delete mode 100644 cpp/src/arrow/testing/generator.h
 delete mode 100644 cpp/src/arrow/testing/gtest_common.h
 delete mode 100644 cpp/src/arrow/testing/gtest_compat.h
 delete mode 100644 cpp/src/arrow/testing/gtest_util.cc
 delete mode 100644 cpp/src/arrow/testing/gtest_util.h
 delete mode 100644 cpp/src/arrow/testing/json_integration.cc
 delete mode 100644 cpp/src/arrow/testing/json_integration.h
 delete mode 100644 cpp/src/arrow/testing/json_integration_test.cc
 delete mode 100644 cpp/src/arrow/testing/json_internal.cc
 delete mode 100644 cpp/src/arrow/testing/json_internal.h
 delete mode 100644 cpp/src/arrow/testing/macros.h
 delete mode 100644 cpp/src/arrow/testing/pch.h
 delete mode 100644 cpp/src/arrow/testing/random.cc
 delete mode 100644 cpp/src/arrow/testing/random.h
 delete mode 100644 cpp/src/arrow/testing/random_test.cc
 delete mode 100644 cpp/src/arrow/testing/util.cc
 delete mode 100644 cpp/src/arrow/testing/util.h
 delete mode 100644 cpp/src/arrow/testing/visibility.h
 delete mode 100644 cpp/src/arrow/type.cc
 delete mode 100644 cpp/src/arrow/type.h
 delete mode 100644 cpp/src/arrow/type_benchmark.cc
 delete mode 100644 cpp/src/arrow/type_fwd.h
 delete mode 100644 cpp/src/arrow/type_test.cc
 delete mode 100644 cpp/src/arrow/type_traits.h
 delete mode 100644 cpp/src/arrow/util/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/util/algorithm.h
 delete mode 100644 cpp/src/arrow/util/align_util.h
 delete mode 100644 cpp/src/arrow/util/align_util_test.cc
 delete mode 100644 cpp/src/arrow/util/async_generator.h
 delete mode 100644 cpp/src/arrow/util/async_generator_test.cc
 delete mode 100644 cpp/src/arrow/util/atomic_shared_ptr.h
 delete mode 100644 cpp/src/arrow/util/base64.h
 delete mode 100644 cpp/src/arrow/util/basic_decimal.cc
 delete mode 100644 cpp/src/arrow/util/basic_decimal.h
 delete mode 100644 cpp/src/arrow/util/benchmark_main.cc
 delete mode 100644 cpp/src/arrow/util/benchmark_util.h
 delete mode 100644 cpp/src/arrow/util/bit_block_counter.cc
 delete mode 100644 cpp/src/arrow/util/bit_block_counter.h
 delete mode 100644 cpp/src/arrow/util/bit_block_counter_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/bit_block_counter_test.cc
 delete mode 100644 cpp/src/arrow/util/bit_run_reader.cc
 delete mode 100644 cpp/src/arrow/util/bit_run_reader.h
 delete mode 100644 cpp/src/arrow/util/bit_stream_utils.h
 delete mode 100644 cpp/src/arrow/util/bit_util.cc
 delete mode 100644 cpp/src/arrow/util/bit_util.h
 delete mode 100644 cpp/src/arrow/util/bit_util_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/bit_util_test.cc
 delete mode 100644 cpp/src/arrow/util/bitmap.cc
 delete mode 100644 cpp/src/arrow/util/bitmap.h
 delete mode 100644 cpp/src/arrow/util/bitmap_builders.cc
 delete mode 100644 cpp/src/arrow/util/bitmap_builders.h
 delete mode 100644 cpp/src/arrow/util/bitmap_generate.h
 delete mode 100644 cpp/src/arrow/util/bitmap_ops.cc
 delete mode 100644 cpp/src/arrow/util/bitmap_ops.h
 delete mode 100644 cpp/src/arrow/util/bitmap_reader.h
 delete mode 100644 cpp/src/arrow/util/bitmap_visit.h
 delete mode 100644 cpp/src/arrow/util/bitmap_writer.h
 delete mode 100644 cpp/src/arrow/util/bitset_stack.h
 delete mode 100644 cpp/src/arrow/util/bpacking.cc
 delete mode 100644 cpp/src/arrow/util/bpacking.h
 delete mode 100644 cpp/src/arrow/util/bpacking_avx2.cc
 delete mode 100644 cpp/src/arrow/util/bpacking_avx2.h
 delete mode 100644 cpp/src/arrow/util/bpacking_avx2_generated.h
 delete mode 100644 cpp/src/arrow/util/bpacking_avx512.cc
 delete mode 100644 cpp/src/arrow/util/bpacking_avx512.h
 delete mode 100644 cpp/src/arrow/util/bpacking_avx512_generated.h
 delete mode 100644 cpp/src/arrow/util/bpacking_default.h
 delete mode 100644 cpp/src/arrow/util/bpacking_neon.cc
 delete mode 100644 cpp/src/arrow/util/bpacking_neon.h
 delete mode 100644 cpp/src/arrow/util/bpacking_simd128_generated.h
 delete mode 100644 cpp/src/arrow/util/bpacking_simd256_generated.h
 delete mode 100644 cpp/src/arrow/util/bpacking_simd512_generated.h
 delete mode 100644 cpp/src/arrow/util/bpacking_simd_codegen.py
 delete mode 100644 cpp/src/arrow/util/bpacking_simd_internal.h
 delete mode 100644 cpp/src/arrow/util/byte_stream_split.h
 delete mode 100644 cpp/src/arrow/util/cache_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/cache_internal.h
 delete mode 100644 cpp/src/arrow/util/cache_test.cc
 delete mode 100644 cpp/src/arrow/util/cancel.cc
 delete mode 100644 cpp/src/arrow/util/cancel.h
 delete mode 100644 cpp/src/arrow/util/cancel_test.cc
 delete mode 100644 cpp/src/arrow/util/checked_cast.h
 delete mode 100644 cpp/src/arrow/util/checked_cast_test.cc
 delete mode 100644 cpp/src/arrow/util/compare.h
 delete mode 100644 cpp/src/arrow/util/compiler_util.h
 delete mode 100644 cpp/src/arrow/util/compression.cc
 delete mode 100644 cpp/src/arrow/util/compression.h
 delete mode 100644 cpp/src/arrow/util/compression_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/compression_brotli.cc
 delete mode 100644 cpp/src/arrow/util/compression_bz2.cc
 delete mode 100644 cpp/src/arrow/util/compression_internal.h
 delete mode 100644 cpp/src/arrow/util/compression_lz4.cc
 delete mode 100644 cpp/src/arrow/util/compression_snappy.cc
 delete mode 100644 cpp/src/arrow/util/compression_test.cc
 delete mode 100644 cpp/src/arrow/util/compression_zlib.cc
 delete mode 100644 cpp/src/arrow/util/compression_zstd.cc
 delete mode 100644 cpp/src/arrow/util/concurrent_map.h
 delete mode 100644 cpp/src/arrow/util/config.h.cmake
 delete mode 100644 cpp/src/arrow/util/converter.h
 delete mode 100644 cpp/src/arrow/util/cpu_info.cc
 delete mode 100644 cpp/src/arrow/util/cpu_info.h
 delete mode 100644 cpp/src/arrow/util/decimal.cc
 delete mode 100644 cpp/src/arrow/util/decimal.h
 delete mode 100644 cpp/src/arrow/util/decimal_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/decimal_test.cc
 delete mode 100644 cpp/src/arrow/util/delimiting.cc
 delete mode 100644 cpp/src/arrow/util/delimiting.h
 delete mode 100644 cpp/src/arrow/util/dispatch.h
 delete mode 100644 cpp/src/arrow/util/double_conversion.h
 delete mode 100644 cpp/src/arrow/util/endian.h
 delete mode 100644 cpp/src/arrow/util/formatting.cc
 delete mode 100644 cpp/src/arrow/util/formatting.h
 delete mode 100644 cpp/src/arrow/util/formatting_util_test.cc
 delete mode 100644 cpp/src/arrow/util/functional.h
 delete mode 100644 cpp/src/arrow/util/future.cc
 delete mode 100644 cpp/src/arrow/util/future.h
 delete mode 100644 cpp/src/arrow/util/future_iterator.h
 delete mode 100644 cpp/src/arrow/util/future_test.cc
 delete mode 100644 cpp/src/arrow/util/hash_util.h
 delete mode 100644 cpp/src/arrow/util/hashing.h
 delete mode 100644 cpp/src/arrow/util/hashing_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/hashing_test.cc
 delete mode 100644 cpp/src/arrow/util/int128_internal.h
 delete mode 100644 cpp/src/arrow/util/int_util.cc
 delete mode 100644 cpp/src/arrow/util/int_util.h
 delete mode 100644 cpp/src/arrow/util/int_util_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/int_util_internal.h
 delete mode 100644 cpp/src/arrow/util/int_util_test.cc
 delete mode 100644 cpp/src/arrow/util/io_util.cc
 delete mode 100644 cpp/src/arrow/util/io_util.h
 delete mode 100644 cpp/src/arrow/util/io_util_test.cc
 delete mode 100644 cpp/src/arrow/util/io_util_test.manifest
 delete mode 100644 cpp/src/arrow/util/io_util_test.rc
 delete mode 100644 cpp/src/arrow/util/iterator.h
 delete mode 100644 cpp/src/arrow/util/iterator_test.cc
 delete mode 100644 cpp/src/arrow/util/key_value_metadata.cc
 delete mode 100644 cpp/src/arrow/util/key_value_metadata.h
 delete mode 100644 cpp/src/arrow/util/key_value_metadata_test.cc
 delete mode 100644 cpp/src/arrow/util/logging.cc
 delete mode 100644 cpp/src/arrow/util/logging.h
 delete mode 100644 cpp/src/arrow/util/logging_test.cc
 delete mode 100644 cpp/src/arrow/util/machine_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/macros.h
 delete mode 100644 cpp/src/arrow/util/make_unique.h
 delete mode 100644 cpp/src/arrow/util/map.h
 delete mode 100644 cpp/src/arrow/util/memory.cc
 delete mode 100644 cpp/src/arrow/util/memory.h
 delete mode 100644 cpp/src/arrow/util/mutex.cc
 delete mode 100644 cpp/src/arrow/util/mutex.h
 delete mode 100644 cpp/src/arrow/util/optional.h
 delete mode 100644 cpp/src/arrow/util/parallel.h
 delete mode 100644 cpp/src/arrow/util/print.h
 delete mode 100644 cpp/src/arrow/util/queue.h
 delete mode 100644 cpp/src/arrow/util/queue_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/queue_test.cc
 delete mode 100644 cpp/src/arrow/util/range.h
 delete mode 100644 cpp/src/arrow/util/range_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/range_test.cc
 delete mode 100644 cpp/src/arrow/util/rle_encoding.h
 delete mode 100644 cpp/src/arrow/util/rle_encoding_test.cc
 delete mode 100644 cpp/src/arrow/util/simd.h
 delete mode 100644 cpp/src/arrow/util/sort.h
 delete mode 100644 cpp/src/arrow/util/spaced.h
 delete mode 100644 cpp/src/arrow/util/stl_util_test.cc
 delete mode 100644 cpp/src/arrow/util/stopwatch.h
 delete mode 100644 cpp/src/arrow/util/string.cc
 delete mode 100644 cpp/src/arrow/util/string.h
 delete mode 100644 cpp/src/arrow/util/string_builder.cc
 delete mode 100644 cpp/src/arrow/util/string_builder.h
 delete mode 100644 cpp/src/arrow/util/string_test.cc
 delete mode 100644 cpp/src/arrow/util/string_view.h
 delete mode 100644 cpp/src/arrow/util/task_group.cc
 delete mode 100644 cpp/src/arrow/util/task_group.h
 delete mode 100644 cpp/src/arrow/util/task_group_test.cc
 delete mode 100644 cpp/src/arrow/util/tdigest.cc
 delete mode 100644 cpp/src/arrow/util/tdigest.h
 delete mode 100644 cpp/src/arrow/util/tdigest_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/tdigest_test.cc
 delete mode 100644 cpp/src/arrow/util/test_common.cc
 delete mode 100644 cpp/src/arrow/util/test_common.h
 delete mode 100644 cpp/src/arrow/util/thread_pool.cc
 delete mode 100644 cpp/src/arrow/util/thread_pool.h
 delete mode 100644 cpp/src/arrow/util/thread_pool_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/thread_pool_test.cc
 delete mode 100644 cpp/src/arrow/util/time.cc
 delete mode 100644 cpp/src/arrow/util/time.h
 delete mode 100644 cpp/src/arrow/util/time_test.cc
 delete mode 100644 cpp/src/arrow/util/trie.cc
 delete mode 100644 cpp/src/arrow/util/trie.h
 delete mode 100644 cpp/src/arrow/util/trie_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/trie_test.cc
 delete mode 100644 cpp/src/arrow/util/type_fwd.h
 delete mode 100644 cpp/src/arrow/util/type_traits.h
 delete mode 100644 cpp/src/arrow/util/ubsan.h
 delete mode 100644 cpp/src/arrow/util/uri.cc
 delete mode 100644 cpp/src/arrow/util/uri.h
 delete mode 100644 cpp/src/arrow/util/uri_test.cc
 delete mode 100644 cpp/src/arrow/util/utf8.cc
 delete mode 100644 cpp/src/arrow/util/utf8.h
 delete mode 100644 cpp/src/arrow/util/utf8_util_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/utf8_util_test.cc
 delete mode 100644 cpp/src/arrow/util/value_parsing.cc
 delete mode 100644 cpp/src/arrow/util/value_parsing.h
 delete mode 100644 cpp/src/arrow/util/value_parsing_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/value_parsing_test.cc
 delete mode 100644 cpp/src/arrow/util/variant.h
 delete mode 100644 cpp/src/arrow/util/variant_benchmark.cc
 delete mode 100644 cpp/src/arrow/util/variant_test.cc
 delete mode 100644 cpp/src/arrow/util/vector.h
 delete mode 100644 cpp/src/arrow/util/visibility.h
 delete mode 100644 cpp/src/arrow/util/windows_compatibility.h
 delete mode 100644 cpp/src/arrow/util/windows_fixup.h
 delete mode 100644 cpp/src/arrow/vendored/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/vendored/ProducerConsumerQueue.h
 delete mode 100644 cpp/src/arrow/vendored/base64.cpp
 delete mode 100644 cpp/src/arrow/vendored/datetime.h
 delete mode 100644 cpp/src/arrow/vendored/datetime/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/vendored/datetime/README.md
 delete mode 100644 cpp/src/arrow/vendored/datetime/date.h
 delete mode 100644 cpp/src/arrow/vendored/datetime/ios.h
 delete mode 100644 cpp/src/arrow/vendored/datetime/ios.mm
 delete mode 100644 cpp/src/arrow/vendored/datetime/tz.cpp
 delete mode 100644 cpp/src/arrow/vendored/datetime/tz.h
 delete mode 100644 cpp/src/arrow/vendored/datetime/tz_private.h
 delete mode 100644 cpp/src/arrow/vendored/datetime/visibility.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/.gitignore
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/CMakeLists.txt
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/README.md
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/bignum-dtoa.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/bignum-dtoa.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/bignum.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/bignum.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/cached-powers.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/cached-powers.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/diy-fp.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/diy-fp.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/double-conversion.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/double-conversion.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/fast-dtoa.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/fast-dtoa.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/fixed-dtoa.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/fixed-dtoa.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/ieee.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/strtod.cc
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/strtod.h
 delete mode 100644 cpp/src/arrow/vendored/double-conversion/utils.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/README.md
 delete mode 100644 cpp/src/arrow/vendored/fast_float/ascii_number.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/decimal_to_binary.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/fast_float.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/fast_table.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/float_common.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/parse_number.h
 delete mode 100644 cpp/src/arrow/vendored/fast_float/simple_decimal_conversion.h
 delete mode 100644 cpp/src/arrow/vendored/musl/README.md
 delete mode 100644 cpp/src/arrow/vendored/musl/strptime.c
 delete mode 100644 cpp/src/arrow/vendored/optional.hpp
 delete mode 100644 cpp/src/arrow/vendored/portable-snippets/README.md
 delete mode 100644 cpp/src/arrow/vendored/portable-snippets/safe-math.h
 delete mode 100644 cpp/src/arrow/vendored/string_view.hpp
 delete mode 100644 cpp/src/arrow/vendored/strptime.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/README.md
 delete mode 100644 cpp/src/arrow/vendored/uriparser/Uri.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriBase.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriCommon.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriCommon.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriCompare.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriDefsAnsi.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriDefsConfig.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriDefsUnicode.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriEscape.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriFile.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriIp4.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriIp4.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriIp4Base.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriIp4Base.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriMemory.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriMemory.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriNormalize.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriNormalizeBase.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriNormalizeBase.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriParse.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriParseBase.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriParseBase.h
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriQuery.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriRecompose.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriResolve.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/UriShorten.c
 delete mode 100644 cpp/src/arrow/vendored/uriparser/config.h
 delete mode 100644 cpp/src/arrow/vendored/utfcpp/README.md
 delete mode 100644 cpp/src/arrow/vendored/utfcpp/checked.h
 delete mode 100644 cpp/src/arrow/vendored/utfcpp/core.h
 delete mode 100644 cpp/src/arrow/vendored/utfcpp/cpp11.h
 delete mode 100644 cpp/src/arrow/vendored/xxhash.h
 delete mode 100644 cpp/src/arrow/vendored/xxhash/README.md
 delete mode 100644 cpp/src/arrow/vendored/xxhash/xxhash.c
 delete mode 100644 cpp/src/arrow/vendored/xxhash/xxhash.h
 delete mode 100644 cpp/src/arrow/visitor.cc
 delete mode 100644 cpp/src/arrow/visitor.h
 delete mode 100644 cpp/src/arrow/visitor_inline.h
 delete mode 100644 cpp/src/gandiva/CMakeLists.txt
 delete mode 100644 cpp/src/gandiva/GandivaConfig.cmake.in
 delete mode 100644 cpp/src/gandiva/annotator.cc
 delete mode 100644 cpp/src/gandiva/annotator.h
 delete mode 100644 cpp/src/gandiva/annotator_test.cc
 delete mode 100644 cpp/src/gandiva/arrow.h
 delete mode 100644 cpp/src/gandiva/basic_decimal_scalar.h
 delete mode 100644 cpp/src/gandiva/bitmap_accumulator.cc
 delete mode 100644 cpp/src/gandiva/bitmap_accumulator.h
 delete mode 100644 cpp/src/gandiva/bitmap_accumulator_test.cc
 delete mode 100644 cpp/src/gandiva/cache.cc
 delete mode 100644 cpp/src/gandiva/cache.h
 delete mode 100644 cpp/src/gandiva/cast_time.cc
 delete mode 100644 cpp/src/gandiva/compiled_expr.h
 delete mode 100644 cpp/src/gandiva/condition.h
 delete mode 100644 cpp/src/gandiva/configuration.cc
 delete mode 100644 cpp/src/gandiva/configuration.h
 delete mode 100644 cpp/src/gandiva/context_helper.cc
 delete mode 100644 cpp/src/gandiva/date_utils.cc
 delete mode 100644 cpp/src/gandiva/date_utils.h
 delete mode 100644 cpp/src/gandiva/decimal_ir.cc
 delete mode 100644 cpp/src/gandiva/decimal_ir.h
 delete mode 100644 cpp/src/gandiva/decimal_scalar.h
 delete mode 100644 cpp/src/gandiva/decimal_type_util.cc
 delete mode 100644 cpp/src/gandiva/decimal_type_util.h
 delete mode 100644 cpp/src/gandiva/decimal_type_util_test.cc
 delete mode 100644 cpp/src/gandiva/decimal_xlarge.cc
 delete mode 100644 cpp/src/gandiva/decimal_xlarge.h
 delete mode 100644 cpp/src/gandiva/dex.h
 delete mode 100644 cpp/src/gandiva/dex_visitor.h
 delete mode 100644 cpp/src/gandiva/engine.cc
 delete mode 100644 cpp/src/gandiva/engine.h
 delete mode 100644 cpp/src/gandiva/engine_llvm_test.cc
 delete mode 100644 cpp/src/gandiva/eval_batch.h
 delete mode 100644 cpp/src/gandiva/execution_context.h
 delete mode 100644 cpp/src/gandiva/exported_funcs.h
 delete mode 100644 cpp/src/gandiva/exported_funcs_registry.cc
 delete mode 100644 cpp/src/gandiva/exported_funcs_registry.h
 delete mode 100644 cpp/src/gandiva/expr_decomposer.cc
 delete mode 100644 cpp/src/gandiva/expr_decomposer.h
 delete mode 100644 cpp/src/gandiva/expr_decomposer_test.cc
 delete mode 100644 cpp/src/gandiva/expr_validator.cc
 delete mode 100644 cpp/src/gandiva/expr_validator.h
 delete mode 100644 cpp/src/gandiva/expression.cc
 delete mode 100644 cpp/src/gandiva/expression.h
 delete mode 100644 cpp/src/gandiva/expression_registry.cc
 delete mode 100644 cpp/src/gandiva/expression_registry.h
 delete mode 100644 cpp/src/gandiva/expression_registry_test.cc
 delete mode 100644 cpp/src/gandiva/field_descriptor.h
 delete mode 100644 cpp/src/gandiva/filter.cc
 delete mode 100644 cpp/src/gandiva/filter.h
 delete mode 100644 cpp/src/gandiva/formatting_utils.h
 delete mode 100644 cpp/src/gandiva/func_descriptor.h
 delete mode 100644 cpp/src/gandiva/function_holder.h
 delete mode 100644 cpp/src/gandiva/function_holder_registry.h
 delete mode 100644 cpp/src/gandiva/function_ir_builder.cc
 delete mode 100644 cpp/src/gandiva/function_ir_builder.h
 delete mode 100644 cpp/src/gandiva/function_registry.cc
 delete mode 100644 cpp/src/gandiva/function_registry.h
 delete mode 100644 cpp/src/gandiva/function_registry_arithmetic.cc
 delete mode 100644 cpp/src/gandiva/function_registry_arithmetic.h
 delete mode 100644 cpp/src/gandiva/function_registry_common.h
 delete mode 100644 cpp/src/gandiva/function_registry_datetime.cc
 delete mode 100644 cpp/src/gandiva/function_registry_datetime.h
 delete mode 100644 cpp/src/gandiva/function_registry_hash.cc
 delete mode 100644 cpp/src/gandiva/function_registry_hash.h
 delete mode 100644 cpp/src/gandiva/function_registry_math_ops.cc
 delete mode 100644 cpp/src/gandiva/function_registry_math_ops.h
 delete mode 100644 cpp/src/gandiva/function_registry_string.cc
 delete mode 100644 cpp/src/gandiva/function_registry_string.h
 delete mode 100644 cpp/src/gandiva/function_registry_test.cc
 delete mode 100644 cpp/src/gandiva/function_registry_timestamp_arithmetic.cc
 delete mode 100644 cpp/src/gandiva/function_registry_timestamp_arithmetic.h
 delete mode 100644 cpp/src/gandiva/function_signature.cc
 delete mode 100644 cpp/src/gandiva/function_signature.h
 delete mode 100644 cpp/src/gandiva/function_signature_test.cc
 delete mode 100644 cpp/src/gandiva/gandiva.pc.in
 delete mode 100644 cpp/src/gandiva/gandiva_aliases.h
 delete mode 100644 cpp/src/gandiva/gdv_function_stubs.cc
 delete mode 100644 cpp/src/gandiva/gdv_function_stubs.h
 delete mode 100644 cpp/src/gandiva/gdv_function_stubs_test.cc
 delete mode 100644 cpp/src/gandiva/hash_utils.cc
 delete mode 100644 cpp/src/gandiva/hash_utils.h
 delete mode 100644 cpp/src/gandiva/hash_utils_test.cc
 delete mode 100644 cpp/src/gandiva/in_holder.h
 delete mode 100644 cpp/src/gandiva/jni/CMakeLists.txt
 delete mode 100644 cpp/src/gandiva/jni/config_builder.cc
 delete mode 100644 cpp/src/gandiva/jni/config_holder.cc
 delete mode 100644 cpp/src/gandiva/jni/config_holder.h
 delete mode 100644 cpp/src/gandiva/jni/env_helper.h
 delete mode 100644 cpp/src/gandiva/jni/expression_registry_helper.cc
 delete mode 100644 cpp/src/gandiva/jni/id_to_module_map.h
 delete mode 100644 cpp/src/gandiva/jni/jni_common.cc
 delete mode 100644 cpp/src/gandiva/jni/module_holder.h
 delete mode 100644 cpp/src/gandiva/jni/symbols.map
 delete mode 100644 cpp/src/gandiva/like_holder.cc
 delete mode 100644 cpp/src/gandiva/like_holder.h
 delete mode 100644 cpp/src/gandiva/like_holder_test.cc
 delete mode 100644 cpp/src/gandiva/literal_holder.cc
 delete mode 100644 cpp/src/gandiva/literal_holder.h
 delete mode 100644 cpp/src/gandiva/llvm_generator.cc
 delete mode 100644 cpp/src/gandiva/llvm_generator.h
 delete mode 100644 cpp/src/gandiva/llvm_generator_test.cc
 delete mode 100644 cpp/src/gandiva/llvm_includes.h
 delete mode 100644 cpp/src/gandiva/llvm_types.cc
 delete mode 100644 cpp/src/gandiva/llvm_types.h
 delete mode 100644 cpp/src/gandiva/llvm_types_test.cc
 delete mode 100644 cpp/src/gandiva/local_bitmaps_holder.h
 delete mode 100644 cpp/src/gandiva/lru_cache.h
 delete mode 100644 cpp/src/gandiva/lru_cache_test.cc
 delete mode 100644 cpp/src/gandiva/lvalue.h
 delete mode 100644 cpp/src/gandiva/make_precompiled_bitcode.py
 delete mode 100644 cpp/src/gandiva/native_function.h
 delete mode 100644 cpp/src/gandiva/node.h
 delete mode 100644 cpp/src/gandiva/node_visitor.h
 delete mode 100644 cpp/src/gandiva/pch.h
 delete mode 100644 cpp/src/gandiva/precompiled/CMakeLists.txt
 delete mode 100644 cpp/src/gandiva/precompiled/arithmetic_ops.cc
 delete mode 100644 cpp/src/gandiva/precompiled/arithmetic_ops_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/bitmap.cc
 delete mode 100644 cpp/src/gandiva/precompiled/bitmap_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/decimal_ops.cc
 delete mode 100644 cpp/src/gandiva/precompiled/decimal_ops.h
 delete mode 100644 cpp/src/gandiva/precompiled/decimal_ops_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/decimal_wrapper.cc
 delete mode 100644 cpp/src/gandiva/precompiled/epoch_time_point.h
 delete mode 100644 cpp/src/gandiva/precompiled/epoch_time_point_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/extended_math_ops.cc
 delete mode 100644 cpp/src/gandiva/precompiled/extended_math_ops_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/hash.cc
 delete mode 100644 cpp/src/gandiva/precompiled/hash_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/print.cc
 delete mode 100644 cpp/src/gandiva/precompiled/string_ops.cc
 delete mode 100644 cpp/src/gandiva/precompiled/string_ops_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/testing.h
 delete mode 100644 cpp/src/gandiva/precompiled/time.cc
 delete mode 100644 cpp/src/gandiva/precompiled/time_constants.h
 delete mode 100644 cpp/src/gandiva/precompiled/time_fields.h
 delete mode 100644 cpp/src/gandiva/precompiled/time_test.cc
 delete mode 100644 cpp/src/gandiva/precompiled/timestamp_arithmetic.cc
 delete mode 100644 cpp/src/gandiva/precompiled/types.h
 delete mode 100644 cpp/src/gandiva/precompiled_bitcode.cc.in
 delete mode 100644 cpp/src/gandiva/projector.cc
 delete mode 100644 cpp/src/gandiva/projector.h
 delete mode 100644 cpp/src/gandiva/proto/Types.proto
 delete mode 100644 cpp/src/gandiva/random_generator_holder.cc
 delete mode 100644 cpp/src/gandiva/random_generator_holder.h
 delete mode 100644 cpp/src/gandiva/random_generator_holder_test.cc
 delete mode 100644 cpp/src/gandiva/regex_util.cc
 delete mode 100644 cpp/src/gandiva/regex_util.h
 delete mode 100644 cpp/src/gandiva/selection_vector.cc
 delete mode 100644 cpp/src/gandiva/selection_vector.h
 delete mode 100644 cpp/src/gandiva/selection_vector_impl.h
 delete mode 100644 cpp/src/gandiva/selection_vector_test.cc
 delete mode 100644 cpp/src/gandiva/simple_arena.h
 delete mode 100644 cpp/src/gandiva/simple_arena_test.cc
 delete mode 100644 cpp/src/gandiva/symbols.map
 delete mode 100644 cpp/src/gandiva/tests/CMakeLists.txt
 delete mode 100644 cpp/src/gandiva/tests/binary_test.cc
 delete mode 100644 cpp/src/gandiva/tests/boolean_expr_test.cc
 delete mode 100644 cpp/src/gandiva/tests/date_time_test.cc
 delete mode 100644 cpp/src/gandiva/tests/decimal_single_test.cc
 delete mode 100644 cpp/src/gandiva/tests/decimal_test.cc
 delete mode 100644 cpp/src/gandiva/tests/filter_project_test.cc
 delete mode 100644 cpp/src/gandiva/tests/filter_test.cc
 delete mode 100644 cpp/src/gandiva/tests/generate_data.h
 delete mode 100644 cpp/src/gandiva/tests/hash_test.cc
 delete mode 100644 cpp/src/gandiva/tests/huge_table_test.cc
 delete mode 100644 cpp/src/gandiva/tests/if_expr_test.cc
 delete mode 100644 cpp/src/gandiva/tests/in_expr_test.cc
 delete mode 100644 cpp/src/gandiva/tests/literal_test.cc
 delete mode 100644 cpp/src/gandiva/tests/micro_benchmarks.cc
 delete mode 100644 cpp/src/gandiva/tests/null_validity_test.cc
 delete mode 100644 cpp/src/gandiva/tests/projector_build_validation_test.cc
 delete mode 100644 cpp/src/gandiva/tests/projector_test.cc
 delete mode 100644 cpp/src/gandiva/tests/test_util.h
 delete mode 100644 cpp/src/gandiva/tests/timed_evaluate.h
 delete mode 100644 cpp/src/gandiva/tests/to_string_test.cc
 delete mode 100644 cpp/src/gandiva/tests/utf8_test.cc
 delete mode 100644 cpp/src/gandiva/to_date_holder.cc
 delete mode 100644 cpp/src/gandiva/to_date_holder.h
 delete mode 100644 cpp/src/gandiva/to_date_holder_test.cc
 delete mode 100644 cpp/src/gandiva/tree_expr_builder.cc
 delete mode 100644 cpp/src/gandiva/tree_expr_builder.h
 delete mode 100644 cpp/src/gandiva/tree_expr_test.cc
 delete mode 100644 cpp/src/gandiva/value_validity_pair.h
 delete mode 100644 cpp/src/gandiva/visibility.h
 delete mode 100644 cpp/src/generated/File_generated.h
 delete mode 100644 cpp/src/generated/Message_generated.h
 delete mode 100644 cpp/src/generated/Schema_generated.h
 delete mode 100644 cpp/src/generated/SparseTensor_generated.h
 delete mode 100644 cpp/src/generated/Tensor_generated.h
 delete mode 100644 cpp/src/generated/feather_generated.h
 delete mode 100644 cpp/src/generated/parquet_constants.cpp
 delete mode 100644 cpp/src/generated/parquet_constants.h
 delete mode 100644 cpp/src/generated/parquet_types.cpp
 delete mode 100644 cpp/src/generated/parquet_types.h
 delete mode 100644 cpp/src/jni/CMakeLists.txt
 delete mode 100644 cpp/src/jni/dataset/CMakeLists.txt
 delete mode 100644 cpp/src/jni/dataset/jni_util.cc
 delete mode 100644 cpp/src/jni/dataset/jni_util.h
 delete mode 100644 cpp/src/jni/dataset/jni_util_test.cc
 delete mode 100644 cpp/src/jni/dataset/jni_wrapper.cc
 delete mode 100644 cpp/src/jni/orc/CMakeLists.txt
 delete mode 100644 cpp/src/jni/orc/concurrent_map.h
 delete mode 100644 cpp/src/jni/orc/jni_wrapper.cpp
 delete mode 100644 cpp/src/parquet/CMakeLists.txt
 delete mode 100644 cpp/src/parquet/ParquetConfig.cmake.in
 delete mode 100644 cpp/src/parquet/README
 delete mode 100644 cpp/src/parquet/api/CMakeLists.txt
 delete mode 100644 cpp/src/parquet/api/io.h
 delete mode 100644 cpp/src/parquet/api/reader.h
 delete mode 100644 cpp/src/parquet/api/schema.h
 delete mode 100644 cpp/src/parquet/api/writer.h
 delete mode 100644 cpp/src/parquet/arrow/CMakeLists.txt
 delete mode 100644 cpp/src/parquet/arrow/arrow_reader_writer_test.cc
 delete mode 100644 cpp/src/parquet/arrow/arrow_schema_test.cc
 delete mode 100644 cpp/src/parquet/arrow/fuzz.cc
 delete mode 100644 cpp/src/parquet/arrow/generate_fuzz_corpus.cc
 delete mode 100644 cpp/src/parquet/arrow/path_internal.cc
 delete mode 100644 cpp/src/parquet/arrow/path_internal.h
 delete mode 100644 cpp/src/parquet/arrow/path_internal_test.cc
 delete mode 100644 cpp/src/parquet/arrow/reader.cc
 delete mode 100644 cpp/src/parquet/arrow/reader.h
 delete mode 100644 cpp/src/parquet/arrow/reader_internal.cc
 delete mode 100644 cpp/src/parquet/arrow/reader_internal.h
 delete mode 100644 cpp/src/parquet/arrow/reader_writer_benchmark.cc
 delete mode 100644 cpp/src/parquet/arrow/reconstruct_internal_test.cc
 delete mode 100644 cpp/src/parquet/arrow/schema.cc
 delete mode 100644 cpp/src/parquet/arrow/schema.h
 delete mode 100644 cpp/src/parquet/arrow/schema_internal.cc
 delete mode 100644 cpp/src/parquet/arrow/schema_internal.h
 delete mode 100644 cpp/src/parquet/arrow/test_util.h
 delete mode 100644 cpp/src/parquet/arrow/writer.cc
 delete mode 100644 cpp/src/parquet/arrow/writer.h
 delete mode 100644 cpp/src/parquet/bloom_filter.cc
 delete mode 100644 cpp/src/parquet/bloom_filter.h
 delete mode 100644 cpp/src/parquet/bloom_filter_test.cc
 delete mode 100644 cpp/src/parquet/column_io_benchmark.cc
 delete mode 100644 cpp/src/parquet/column_page.h
 delete mode 100644 cpp/src/parquet/column_reader.cc
 delete mode 100644 cpp/src/parquet/column_reader.h
 delete mode 100644 cpp/src/parquet/column_reader_test.cc
 delete mode 100644 cpp/src/parquet/column_scanner.cc
 delete mode 100644 cpp/src/parquet/column_scanner.h
 delete mode 100644 cpp/src/parquet/column_scanner_test.cc
 delete mode 100644 cpp/src/parquet/column_writer.cc
 delete mode 100644 cpp/src/parquet/column_writer.h
 delete mode 100644 cpp/src/parquet/column_writer_test.cc
 delete mode 100644 cpp/src/parquet/encoding.cc
 delete mode 100644 cpp/src/parquet/encoding.h
 delete mode 100644 cpp/src/parquet/encoding_benchmark.cc
 delete mode 100644 cpp/src/parquet/encoding_test.cc
 delete mode 100644 cpp/src/parquet/encryption/CMakeLists.txt
 delete mode 100644 cpp/src/parquet/encryption/crypto_factory.cc
 delete mode 100644 cpp/src/parquet/encryption/crypto_factory.h
 delete mode 100644 cpp/src/parquet/encryption/encryption.cc
 delete mode 100644 cpp/src/parquet/encryption/encryption.h
 delete mode 100644 cpp/src/parquet/encryption/encryption_internal.cc
 delete mode 100644 cpp/src/parquet/encryption/encryption_internal.h
 delete mode 100644 cpp/src/parquet/encryption/encryption_internal_nossl.cc
 delete mode 100644 cpp/src/parquet/encryption/file_key_material_store.h
 delete mode 100644 cpp/src/parquet/encryption/file_key_unwrapper.cc
 delete mode 100644 cpp/src/parquet/encryption/file_key_unwrapper.h
 delete mode 100644 cpp/src/parquet/encryption/file_key_wrapper.cc
 delete mode 100644 cpp/src/parquet/encryption/file_key_wrapper.h
 delete mode 100644 cpp/src/parquet/encryption/internal_file_decryptor.cc
 delete mode 100644 cpp/src/parquet/encryption/internal_file_decryptor.h
 delete mode 100644 cpp/src/parquet/encryption/internal_file_encryptor.cc
 delete mode 100644 cpp/src/parquet/encryption/internal_file_encryptor.h
 delete mode 100644 cpp/src/parquet/encryption/key_encryption_key.h
 delete mode 100644 cpp/src/parquet/encryption/key_management_test.cc
 delete mode 100644 cpp/src/parquet/encryption/key_material.cc
 delete mode 100644 cpp/src/parquet/encryption/key_material.h
 delete mode 100644 cpp/src/parquet/encryption/key_metadata.cc
 delete mode 100644 cpp/src/parquet/encryption/key_metadata.h
 delete mode 100644 cpp/src/parquet/encryption/key_metadata_test.cc
 delete mode 100644 cpp/src/parquet/encryption/key_toolkit.cc
 delete mode 100644 cpp/src/parquet/encryption/key_toolkit.h
 delete mode 100644 cpp/src/parquet/encryption/key_toolkit_internal.cc
 delete mode 100644 cpp/src/parquet/encryption/key_toolkit_internal.h
 delete mode 100644 cpp/src/parquet/encryption/key_wrapping_test.cc
 delete mode 100644 cpp/src/parquet/encryption/kms_client.cc
 delete mode 100644 cpp/src/parquet/encryption/kms_client.h
 delete mode 100644 cpp/src/parquet/encryption/kms_client_factory.h
 delete mode 100644 cpp/src/parquet/encryption/local_wrap_kms_client.cc
 delete mode 100644 cpp/src/parquet/encryption/local_wrap_kms_client.h
 delete mode 100644 cpp/src/parquet/encryption/properties_test.cc
 delete mode 100644 cpp/src/parquet/encryption/read_configurations_test.cc
 delete mode 100644 cpp/src/parquet/encryption/test_encryption_util.cc
 delete mode 100644 cpp/src/parquet/encryption/test_encryption_util.h
 delete mode 100644 cpp/src/parquet/encryption/test_in_memory_kms.cc
 delete mode 100644 cpp/src/parquet/encryption/test_in_memory_kms.h
 delete mode 100644 cpp/src/parquet/encryption/two_level_cache_with_expiration.h
 delete mode 100644 cpp/src/parquet/encryption/two_level_cache_with_expiration_test.cc
 delete mode 100644 cpp/src/parquet/encryption/write_configurations_test.cc
 delete mode 100644 cpp/src/parquet/exception.cc
 delete mode 100644 cpp/src/parquet/exception.h
 delete mode 100644 cpp/src/parquet/file_deserialize_test.cc
 delete mode 100644 cpp/src/parquet/file_reader.cc
 delete mode 100644 cpp/src/parquet/file_reader.h
 delete mode 100644 cpp/src/parquet/file_serialize_test.cc
 delete mode 100644 cpp/src/parquet/file_writer.cc
 delete mode 100644 cpp/src/parquet/file_writer.h
 delete mode 100644 cpp/src/parquet/hasher.h
 delete mode 100644 cpp/src/parquet/level_comparison.cc
 delete mode 100644 cpp/src/parquet/level_comparison.h
 delete mode 100644 cpp/src/parquet/level_comparison_avx2.cc
 delete mode 100644 cpp/src/parquet/level_comparison_inc.h
 delete mode 100644 cpp/src/parquet/level_conversion.cc
 delete mode 100644 cpp/src/parquet/level_conversion.h
 delete mode 100644 cpp/src/parquet/level_conversion_benchmark.cc
 delete mode 100644 cpp/src/parquet/level_conversion_bmi2.cc
 delete mode 100644 cpp/src/parquet/level_conversion_inc.h
 delete mode 100644 cpp/src/parquet/level_conversion_test.cc
 delete mode 100644 cpp/src/parquet/metadata.cc
 delete mode 100644 cpp/src/parquet/metadata.h
 delete mode 100644 cpp/src/parquet/metadata_test.cc
 delete mode 100644 cpp/src/parquet/murmur3.cc
 delete mode 100644 cpp/src/parquet/murmur3.h
 delete mode 100644 cpp/src/parquet/parquet.pc.in
 delete mode 100644 cpp/src/parquet/parquet.thrift
 delete mode 100644 cpp/src/parquet/parquet_version.h.in
 delete mode 100644 cpp/src/parquet/pch.h
 delete mode 100644 cpp/src/parquet/platform.cc
 delete mode 100644 cpp/src/parquet/platform.h
 delete mode 100644 cpp/src/parquet/printer.cc
 delete mode 100644 cpp/src/parquet/printer.h
 delete mode 100644 cpp/src/parquet/properties.cc
 delete mode 100644 cpp/src/parquet/properties.h
 delete mode 100644 cpp/src/parquet/properties_test.cc
 delete mode 100644 cpp/src/parquet/public_api_test.cc
 delete mode 100644 cpp/src/parquet/reader_test.cc
 delete mode 100644 cpp/src/parquet/schema.cc
 delete mode 100644 cpp/src/parquet/schema.h
 delete mode 100644 cpp/src/parquet/schema_internal.h
 delete mode 100644 cpp/src/parquet/schema_test.cc
 delete mode 100644 cpp/src/parquet/statistics.cc
 delete mode 100644 cpp/src/parquet/statistics.h
 delete mode 100644 cpp/src/parquet/statistics_test.cc
 delete mode 100644 cpp/src/parquet/stream_reader.cc
 delete mode 100644 cpp/src/parquet/stream_reader.h
 delete mode 100644 cpp/src/parquet/stream_reader_test.cc
 delete mode 100644 cpp/src/parquet/stream_writer.cc
 delete mode 100644 cpp/src/parquet/stream_writer.h
 delete mode 100644 cpp/src/parquet/stream_writer_test.cc
 delete mode 100644 cpp/src/parquet/symbols.map
 delete mode 100644 cpp/src/parquet/test_util.cc
 delete mode 100644 cpp/src/parquet/test_util.h
 delete mode 100644 cpp/src/parquet/thrift_internal.h
 delete mode 100644 cpp/src/parquet/type_fwd.h
 delete mode 100644 cpp/src/parquet/types.cc
 delete mode 100644 cpp/src/parquet/types.h
 delete mode 100644 cpp/src/parquet/types_test.cc
 delete mode 100644 cpp/src/parquet/windows_compatibility.h
 delete mode 100644 cpp/src/plasma/.gitignore
 delete mode 100644 cpp/src/plasma/CMakeLists.txt
 delete mode 100644 cpp/src/plasma/PlasmaConfig.cmake.in
 delete mode 100644 cpp/src/plasma/client.cc
 delete mode 100644 cpp/src/plasma/client.h
 delete mode 100644 cpp/src/plasma/common.cc
 delete mode 100644 cpp/src/plasma/common.fbs
 delete mode 100644 cpp/src/plasma/common.h
 delete mode 100644 cpp/src/plasma/common_generated.h
 delete mode 100644 cpp/src/plasma/compat.h
 delete mode 100644 cpp/src/plasma/dlmalloc.cc
 delete mode 100644 cpp/src/plasma/events.cc
 delete mode 100644 cpp/src/plasma/events.h
 delete mode 100644 cpp/src/plasma/eviction_policy.cc
 delete mode 100644 cpp/src/plasma/eviction_policy.h
 delete mode 100644 cpp/src/plasma/external_store.cc
 delete mode 100644 cpp/src/plasma/external_store.h
 delete mode 100644 cpp/src/plasma/fling.cc
 delete mode 100644 cpp/src/plasma/fling.h
 delete mode 100644 cpp/src/plasma/hash_table_store.cc
 delete mode 100644 cpp/src/plasma/hash_table_store.h
 delete mode 100644 cpp/src/plasma/io.cc
 delete mode 100644 cpp/src/plasma/io.h
 delete mode 100644 cpp/src/plasma/lib/java/org_apache_arrow_plasma_PlasmaClientJNI.cc
 delete mode 100644 cpp/src/plasma/lib/java/org_apache_arrow_plasma_PlasmaClientJNI.h
 delete mode 100644 cpp/src/plasma/malloc.cc
 delete mode 100644 cpp/src/plasma/malloc.h
 delete mode 100644 cpp/src/plasma/plasma.cc
 delete mode 100644 cpp/src/plasma/plasma.fbs
 delete mode 100644 cpp/src/plasma/plasma.h
 delete mode 100644 cpp/src/plasma/plasma.pc.in
 delete mode 100644 cpp/src/plasma/plasma_allocator.cc
 delete mode 100644 cpp/src/plasma/plasma_allocator.h
 delete mode 100644 cpp/src/plasma/plasma_generated.h
 delete mode 100644 cpp/src/plasma/protocol.cc
 delete mode 100644 cpp/src/plasma/protocol.h
 delete mode 100644 cpp/src/plasma/quota_aware_policy.cc
 delete mode 100644 cpp/src/plasma/quota_aware_policy.h
 delete mode 100644 cpp/src/plasma/store.cc
 delete mode 100644 cpp/src/plasma/store.h
 delete mode 100644 cpp/src/plasma/symbols.map
 delete mode 100644 cpp/src/plasma/test/client_tests.cc
 delete mode 100644 cpp/src/plasma/test/external_store_tests.cc
 delete mode 100644 cpp/src/plasma/test/serialization_tests.cc
 delete mode 100644 cpp/src/plasma/test_util.h
 delete mode 100644 cpp/src/plasma/thirdparty/ae/ae.c
 delete mode 100644 cpp/src/plasma/thirdparty/ae/ae.h
 delete mode 100644 cpp/src/plasma/thirdparty/ae/ae_epoll.c
 delete mode 100644 cpp/src/plasma/thirdparty/ae/ae_evport.c
 delete mode 100644 cpp/src/plasma/thirdparty/ae/ae_kqueue.c
 delete mode 100644 cpp/src/plasma/thirdparty/ae/ae_select.c
 delete mode 100644 cpp/src/plasma/thirdparty/ae/config.h
 delete mode 100644 cpp/src/plasma/thirdparty/ae/zmalloc.h
 delete mode 100644 cpp/src/plasma/thirdparty/dlmalloc.c
 delete mode 160000 cpp/submodules/parquet-testing
 delete mode 100644 cpp/thirdparty/README.md
 delete mode 100755 cpp/thirdparty/download_dependencies.sh
 delete mode 100644 cpp/thirdparty/flatbuffers/include/flatbuffers/base.h
 delete mode 100644 cpp/thirdparty/flatbuffers/include/flatbuffers/flatbuffers.h
 delete mode 100644 cpp/thirdparty/flatbuffers/include/flatbuffers/stl_emulation.h
 delete mode 100644 cpp/thirdparty/hadoop/include/hdfs.h
 delete mode 100644 cpp/thirdparty/versions.txt
 delete mode 100644 cpp/tools/parquet/CMakeLists.txt
 delete mode 100644 cpp/tools/parquet/parquet_dump_schema.cc
 delete mode 100644 cpp/tools/parquet/parquet_reader.cc
 delete mode 100644 cpp/tools/parquet/parquet_scan.cc
 delete mode 100644 cpp/valgrind.supp
 delete mode 100644 cpp/vcpkg.json
 delete mode 100644 csharp/.editorconfig
 delete mode 100644 csharp/.gitattributes
 delete mode 100644 csharp/.gitignore
 delete mode 100644 csharp/Apache.Arrow.sln
 delete mode 100644 csharp/ApacheArrow.snk
 delete mode 100644 csharp/Directory.Build.props
 delete mode 100644 csharp/Directory.Build.targets
 delete mode 100644 csharp/README.md
 delete mode 100644 csharp/examples/Examples.sln
 delete mode 100644 csharp/examples/FluentBuilderExample/FluentBuilderExample.csproj
 delete mode 100644 csharp/examples/FluentBuilderExample/Program.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj
 delete mode 100644 csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIEndpointRouteBuilderExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIGrpcServerBuilderExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Client/FlightClient.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamReader.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchDuplexStreamingCall.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchStreamingCall.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightAction.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightActionType.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightCriteria.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightDescriptor.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightDescriptorType.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightEndpoint.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightInfo.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightLocation.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightPutResult.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamReader.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightResult.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/FlightTicket.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Internal/FlightDataStream.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Internal/FlightMessageSerializer.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Internal/RecordBatcReaderImplementation.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Internal/SchemaWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Internal/StreamReader.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Internal/StreamWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Properties/AssemblyInfo.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Server/FlightServer.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamReader.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow.Flight/Server/Internal/FlightServerImplementation.cs
 delete mode 100644 csharp/src/Apache.Arrow/Apache.Arrow.csproj
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/ArrayData.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/ArrowArrayBuilderFactory.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/ArrowArrayFactory.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/ArrowArrayVisitor.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/BinaryArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/BooleanArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Date32Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Date64Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/DateArrayBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/DelegatingArrayBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/DoubleArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/FixedSizeBinaryArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/FloatArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Int16Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Int32Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Int64Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/Int8Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/ListArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/PrimitiveArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/PrimitiveArrayBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/StringArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/StructArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/TimestampArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/UInt16Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/UInt32Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/UInt64Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/UInt8Array.cs
 delete mode 100644 csharp/src/Apache.Arrow/Arrays/UnionArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/ArrowBuffer.BitmapBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/ArrowBuffer.Builder.cs
 delete mode 100644 csharp/src/Apache.Arrow/ArrowBuffer.cs
 delete mode 100644 csharp/src/Apache.Arrow/BitUtility.cs
 delete mode 100644 csharp/src/Apache.Arrow/ChunkedArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Column.cs
 delete mode 100644 csharp/src/Apache.Arrow/DecimalUtility.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/ArrayDataExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/ArrayPoolExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/ArrowTypeExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/FlatbufExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/SpanExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/StreamExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/StreamExtensions.netcoreapp2.1.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/StreamExtensions.netstandard.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/TimeSpanExtensions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Extensions/TupleExtensions.netstandard.cs
 delete mode 100644 csharp/src/Apache.Arrow/Field.Builder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Field.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Block.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/BodyCompression.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Buffer.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/DictionaryBatch.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/DictionaryEncoding.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/BodyCompressionMethod.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/CompressionType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/DateUnit.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/Endianness.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/Feature.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/IntervalUnit.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/MessageHeader.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/MetadataVersion.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/Precision.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/TimeUnit.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Enums/UnionMode.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Field.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FieldNode.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FixedSizeBinary.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FixedSizeList.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/ByteBuffer.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/ByteBufferUtil.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/FlatBufferBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/FlatBufferConstants.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/IFlatbufferObject.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/Offset.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/Struct.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/Table.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Footer.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/KeyValue.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Map.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Message.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/RecordBatch.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Schema.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Tensor.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/TensorDim.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Binary.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Bool.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Date.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Decimal.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Duration.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/FloatingPoint.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Int.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Interval.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/LargeBinary.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/LargeList.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/LargeUtf8.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/List.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Null.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Struct_.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Time.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Timestamp.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Union.cs
 delete mode 100644 csharp/src/Apache.Arrow/Flatbuf/Types/Utf8.cs
 delete mode 100644 csharp/src/Apache.Arrow/Interfaces/IArrowArray.cs
 delete mode 100644 csharp/src/Apache.Arrow/Interfaces/IArrowArrayBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Interfaces/IArrowArrayVisitor.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowFileConstants.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowFileReader.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowFileReaderImplementation.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowFileWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowFooter.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowMemoryReaderImplementation.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowReaderImplementation.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowStreamReader.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowStreamReaderImplementation.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowStreamWriter.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ArrowTypeFlatbufferBuilder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/Block.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/IArrowReader.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/IpcOptions.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/MessageSerializer.cs
 delete mode 100644 csharp/src/Apache.Arrow/Ipc/ReadOnlyMemoryBufferAllocator.cs
 delete mode 100644 csharp/src/Apache.Arrow/Memory/MemoryAllocator.cs
 delete mode 100644 csharp/src/Apache.Arrow/Memory/NativeMemoryAllocator.cs
 delete mode 100644 csharp/src/Apache.Arrow/Memory/NativeMemoryManager.cs
 delete mode 100644 csharp/src/Apache.Arrow/Memory/NullMemoryOwner.cs
 delete mode 100644 csharp/src/Apache.Arrow/Properties/AssembyInfo.cs
 delete mode 100644 csharp/src/Apache.Arrow/Properties/Resources.Designer.cs
 delete mode 100644 csharp/src/Apache.Arrow/Properties/Resources.resx
 delete mode 100644 csharp/src/Apache.Arrow/RecordBatch.Builder.cs
 delete mode 100644 csharp/src/Apache.Arrow/RecordBatch.cs
 delete mode 100644 csharp/src/Apache.Arrow/Schema.Builder.cs
 delete mode 100644 csharp/src/Apache.Arrow/Schema.cs
 delete mode 100644 csharp/src/Apache.Arrow/Table.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/ArrowType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/BinaryType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/BooleanType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Date32Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Date64Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/DateType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Decimal128Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Decimal256Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/DoubleType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/FixedSizeBinaryType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/FixedWidthType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/FloatType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/FloatingPointType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/HalfFloatType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/IArrowType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/IArrowTypeVisitor.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Int16Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Int32Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Int64Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Int8Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/IntervalUnit.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/ListType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/NestedType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/NullType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/NumberType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/StringType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/StructType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Time32Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/Time64Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/TimeType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/TimestampType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/UInt16Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/UInt32Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/UInt64Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/UInt8Type.cs
 delete mode 100644 csharp/src/Apache.Arrow/Types/UnionType.cs
 delete mode 100644 csharp/src/Apache.Arrow/Utility.cs
 delete mode 100644 csharp/test/Apache.Arrow.Benchmarks/Apache.Arrow.Benchmarks.csproj
 delete mode 100644 csharp/test/Apache.Arrow.Benchmarks/ArrowReaderBenchmark.cs
 delete mode 100644 csharp/test/Apache.Arrow.Benchmarks/ArrowWriterBenchmark.cs
 delete mode 100644 csharp/test/Apache.Arrow.Benchmarks/Program.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/Apache.Arrow.Flight.TestWeb.csproj
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/Extensions/AsyncStreamExtensions.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/FlightHolder.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/FlightStore.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/Program.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/Properties/launchSettings.json
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/RecordBatchWithMetadata.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/Startup.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/TestFlightServer.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/appsettings.Development.json
 delete mode 100644 csharp/test/Apache.Arrow.Flight.TestWeb/appsettings.json
 delete mode 100644 csharp/test/Apache.Arrow.Flight.Tests/Apache.Arrow.Flight.Tests.csproj
 delete mode 100644 csharp/test/Apache.Arrow.Flight.Tests/FlightInfoComparer.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.Tests/FlightTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Flight.Tests/TestWebFactory.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Apache.Arrow.Tests.csproj
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrayBuilderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrayTypeComparer.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowBufferBitmapBuilderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowBufferBuilderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowBufferTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowFileReaderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowFileWriterTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowReaderVerifier.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowStreamReaderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ArrowStreamWriterTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/BinaryArrayBuilderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/BitUtilityTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/BooleanArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/ColumnTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Date32ArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Date64ArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Decimal128ArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Decimal256ArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/DecimalUtilityTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Extensions/DateTimeOffsetExtensions.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/FieldComparer.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/Fixtures/DefaultMemoryAllocatorFixture.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/SchemaBuilderTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/SchemaComparer.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/StructArrayTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/TableTests.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/TestData.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/TestDateAndTimeData.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/TestMemoryAllocator.cs
 delete mode 100644 csharp/test/Apache.Arrow.Tests/TypeTests.cs
 delete mode 100644 csharp/test/Directory.Build.props
 delete mode 100644 docs/.gitignore
 delete mode 100644 docs/Makefile
 delete mode 100644 docs/README.md
 delete mode 100644 docs/environment.yml
 delete mode 100644 docs/make.bat
 delete mode 100644 docs/requirements.txt
 delete mode 100644 docs/source/_static/arrow.png
 delete mode 100644 docs/source/_static/favicon.ico
 delete mode 100644 docs/source/_static/theme_overrides.css
 delete mode 100644 docs/source/_templates/docs-sidebar.html
 delete mode 100644 docs/source/_templates/layout.html
 delete mode 100644 docs/source/conf.py
 delete mode 100644 docs/source/cpp/api.rst
 delete mode 100644 docs/source/cpp/api/array.rst
 delete mode 100644 docs/source/cpp/api/builder.rst
 delete mode 100644 docs/source/cpp/api/c_abi.rst
 delete mode 100644 docs/source/cpp/api/compute.rst
 delete mode 100644 docs/source/cpp/api/cuda.rst
 delete mode 100644 docs/source/cpp/api/dataset.rst
 delete mode 100644 docs/source/cpp/api/datatype.rst
 delete mode 100644 docs/source/cpp/api/filesystem.rst
 delete mode 100644 docs/source/cpp/api/flight.rst
 delete mode 100644 docs/source/cpp/api/formats.rst
 delete mode 100644 docs/source/cpp/api/io.rst
 delete mode 100644 docs/source/cpp/api/ipc.rst
 delete mode 100644 docs/source/cpp/api/memory.rst
 delete mode 100644 docs/source/cpp/api/scalar.rst
 delete mode 100644 docs/source/cpp/api/support.rst
 delete mode 100644 docs/source/cpp/api/table.rst
 delete mode 100644 docs/source/cpp/api/tensor.rst
 delete mode 100644 docs/source/cpp/api/utilities.rst
 delete mode 100644 docs/source/cpp/arrays.rst
 delete mode 100644 docs/source/cpp/cmake.rst
 delete mode 100644 docs/source/cpp/compute.rst
 delete mode 100644 docs/source/cpp/conventions.rst
 delete mode 100644 docs/source/cpp/csv.rst
 delete mode 100644 docs/source/cpp/dataset.rst
 delete mode 100644 docs/source/cpp/datatypes.rst
 delete mode 100644 docs/source/cpp/examples/cmake_minimal_build.rst
 delete mode 100644 docs/source/cpp/examples/dataset_documentation_example.rst
 delete mode 100644 docs/source/cpp/examples/index.rst
 delete mode 100644 docs/source/cpp/examples/row_columnar_conversion.rst
 delete mode 100644 docs/source/cpp/examples/tuple_range_conversion.rst
 delete mode 100644 docs/source/cpp/flight.rst
 delete mode 100644 docs/source/cpp/getting_started.rst
 delete mode 100644 docs/source/cpp/index.rst
 delete mode 100644 docs/source/cpp/io.rst
 delete mode 100644 docs/source/cpp/ipc.rst
 delete mode 100644 docs/source/cpp/json.rst
 delete mode 100644 docs/source/cpp/memory.rst
 delete mode 100644 docs/source/cpp/overview.rst
 delete mode 100644 docs/source/cpp/parquet.rst
 delete mode 100644 docs/source/cpp/tables.rst
 delete mode 100644 docs/source/developers/archery.rst
 delete mode 100644 docs/source/developers/benchmarks.rst
 delete mode 100644 docs/source/developers/contributing.rst
 delete mode 100644 docs/source/developers/cpp/building.rst
 delete mode 100644 docs/source/developers/cpp/conventions.rst
 delete mode 100644 docs/source/developers/cpp/development.rst
 delete mode 100644 docs/source/developers/cpp/fuzzing.rst
 delete mode 100644 docs/source/developers/cpp/index.rst
 delete mode 100644 docs/source/developers/cpp/windows.rst
 delete mode 100644 docs/source/developers/crossbow.rst
 delete mode 100644 docs/source/developers/docker.rst
 delete mode 100644 docs/source/developers/documentation.rst
 delete mode 100644 docs/source/developers/python.rst
 delete mode 100644 docs/source/example.gz
 delete mode 100644 docs/source/format/Arrow.graffle
 delete mode 100644 docs/source/format/Arrow.png
 delete mode 100644 docs/source/format/CDataInterface.rst
 delete mode 100644 docs/source/format/CStreamInterface.rst
 delete mode 100644 docs/source/format/Columnar.rst
 delete mode 100644 docs/source/format/Flight.rst
 delete mode 100644 docs/source/format/Guidelines.rst
 delete mode 100644 docs/source/format/IPC.rst
 delete mode 100644 docs/source/format/Integration.rst
 delete mode 100644 docs/source/format/Layout.rst
 delete mode 100644 docs/source/format/Metadata.rst
 delete mode 100644 docs/source/format/Other.rst
 delete mode 100644 docs/source/format/README.md
 delete mode 100644 docs/source/format/Versioning.rst
 delete mode 100644 docs/source/format/integration_json_examples/simple.json
 delete mode 100644 docs/source/format/integration_json_examples/struct.json
 delete mode 100644 docs/source/index.rst
 delete mode 100644 docs/source/java/index.rst
 delete mode 100644 docs/source/java/ipc.rst
 delete mode 100644 docs/source/java/vector.rst
 delete mode 100644 docs/source/java/vector_schema_root.rst
 delete mode 100644 docs/source/python/api.rst
 delete mode 100644 docs/source/python/api/arrays.rst
 delete mode 100644 docs/source/python/api/compute.rst
 delete mode 100644 docs/source/python/api/cuda.rst
 delete mode 100644 docs/source/python/api/dataset.rst
 delete mode 100644 docs/source/python/api/datatypes.rst
 delete mode 100644 docs/source/python/api/files.rst
 delete mode 100644 docs/source/python/api/filesystems.rst
 delete mode 100644 docs/source/python/api/flight.rst
 delete mode 100644 docs/source/python/api/formats.rst
 delete mode 100644 docs/source/python/api/ipc.rst
 delete mode 100644 docs/source/python/api/memory.rst
 delete mode 100644 docs/source/python/api/misc.rst
 delete mode 100644 docs/source/python/api/plasma.rst
 delete mode 100644 docs/source/python/api/tables.rst
 delete mode 100644 docs/source/python/benchmarks.rst
 delete mode 100644 docs/source/python/compute.rst
 delete mode 100644 docs/source/python/csv.rst
 delete mode 100644 docs/source/python/cuda.rst
 delete mode 100644 docs/source/python/data.rst
 delete mode 100644 docs/source/python/dataset.rst
 delete mode 100644 docs/source/python/extending.rst
 delete mode 100644 docs/source/python/extending_types.rst
 delete mode 100644 docs/source/python/feather.rst
 delete mode 100644 docs/source/python/filesystems.rst
 delete mode 100644 docs/source/python/filesystems_deprecated.rst
 delete mode 100644 docs/source/python/getting_involved.rst
 delete mode 100644 docs/source/python/index.rst
 delete mode 100644 docs/source/python/install.rst
 delete mode 100644 docs/source/python/ipc.rst
 delete mode 100644 docs/source/python/json.rst
 delete mode 100644 docs/source/python/memory.rst
 delete mode 100644 docs/source/python/numpy.rst
 delete mode 100644 docs/source/python/pandas.rst
 delete mode 100644 docs/source/python/parquet.rst
 delete mode 100644 docs/source/python/plasma.rst
 delete mode 100644 docs/source/python/timestamps.rst
 delete mode 100644 docs/source/status.rst
 delete mode 100644 go/README.md
 delete mode 100644 go/arrow/.editorconfig
 delete mode 100644 go/arrow/.gitignore
 delete mode 100644 go/arrow/Gopkg.lock
 delete mode 100644 go/arrow/Gopkg.toml
 delete mode 100644 go/arrow/LICENSE.txt
 delete mode 100644 go/arrow/Makefile
 delete mode 100644 go/arrow/_examples/helloworld/main.go
 delete mode 100644 go/arrow/_tools/tmpl/main.go
 delete mode 100644 go/arrow/_tools/tmpl/main_test.go
 delete mode 100644 go/arrow/array/array.go
 delete mode 100644 go/arrow/array/array_test.go
 delete mode 100644 go/arrow/array/binary.go
 delete mode 100644 go/arrow/array/binary_test.go
 delete mode 100644 go/arrow/array/binarybuilder.go
 delete mode 100644 go/arrow/array/binarybuilder_test.go
 delete mode 100644 go/arrow/array/boolean.go
 delete mode 100644 go/arrow/array/boolean_test.go
 delete mode 100644 go/arrow/array/booleanbuilder.go
 delete mode 100644 go/arrow/array/booleanbuilder_test.go
 delete mode 100644 go/arrow/array/bufferbuilder.go
 delete mode 100644 go/arrow/array/bufferbuilder_byte.go
 delete mode 100644 go/arrow/array/bufferbuilder_numeric.gen.go
 delete mode 100644 go/arrow/array/bufferbuilder_numeric.gen.go.tmpl
 delete mode 100644 go/arrow/array/bufferbuilder_numeric_test.go
 delete mode 100644 go/arrow/array/builder.go
 delete mode 100644 go/arrow/array/builder_test.go
 delete mode 100644 go/arrow/array/compare.go
 delete mode 100644 go/arrow/array/compare_test.go
 delete mode 100644 go/arrow/array/data.go
 delete mode 100644 go/arrow/array/data_test.go
 delete mode 100644 go/arrow/array/decimal128.go
 delete mode 100644 go/arrow/array/decimal128_test.go
 delete mode 100644 go/arrow/array/doc.go
 delete mode 100644 go/arrow/array/fixed_size_list.go
 delete mode 100644 go/arrow/array/fixed_size_list_test.go
 delete mode 100644 go/arrow/array/fixedsize_binary.go
 delete mode 100644 go/arrow/array/fixedsize_binary_test.go
 delete mode 100644 go/arrow/array/fixedsize_binarybuilder.go
 delete mode 100644 go/arrow/array/fixedsize_binarybuilder_test.go
 delete mode 100644 go/arrow/array/float16.go
 delete mode 100644 go/arrow/array/float16_builder.go
 delete mode 100644 go/arrow/array/float16_builder_test.go
 delete mode 100644 go/arrow/array/interval.go
 delete mode 100644 go/arrow/array/interval_test.go
 delete mode 100644 go/arrow/array/list.go
 delete mode 100644 go/arrow/array/list_test.go
 delete mode 100644 go/arrow/array/null.go
 delete mode 100644 go/arrow/array/null_test.go
 delete mode 100644 go/arrow/array/numeric.gen.go
 delete mode 100644 go/arrow/array/numeric.gen.go.tmpl
 delete mode 100644 go/arrow/array/numeric_test.go
 delete mode 100644 go/arrow/array/numericbuilder.gen.go
 delete mode 100644 go/arrow/array/numericbuilder.gen.go.tmpl
 delete mode 100644 go/arrow/array/numericbuilder.gen_test.go
 delete mode 100644 go/arrow/array/numericbuilder.gen_test.go.tmpl
 delete mode 100644 go/arrow/array/record.go
 delete mode 100644 go/arrow/array/record_test.go
 delete mode 100644 go/arrow/array/string.go
 delete mode 100644 go/arrow/array/string_test.go
 delete mode 100644 go/arrow/array/struct.go
 delete mode 100644 go/arrow/array/struct_test.go
 delete mode 100644 go/arrow/array/table.go
 delete mode 100644 go/arrow/array/table_test.go
 delete mode 100644 go/arrow/array/util.go
 delete mode 100644 go/arrow/arrio/arrio.go
 delete mode 100644 go/arrow/arrio/arrio_test.go
 delete mode 100644 go/arrow/bitutil/bitutil.go
 delete mode 100644 go/arrow/bitutil/bitutil_test.go
 delete mode 100644 go/arrow/compare.go
 delete mode 100644 go/arrow/compare_test.go
 delete mode 100644 go/arrow/csv/common.go
 delete mode 100644 go/arrow/csv/reader.go
 delete mode 100644 go/arrow/csv/reader_test.go
 delete mode 100644 go/arrow/csv/testdata/header.csv
 delete mode 100644 go/arrow/csv/testdata/simple.csv
 delete mode 100644 go/arrow/csv/testdata/types.csv
 delete mode 100644 go/arrow/csv/writer.go
 delete mode 100644 go/arrow/csv/writer_test.go
 delete mode 100644 go/arrow/datatype.go
 delete mode 100644 go/arrow/datatype_binary.go
 delete mode 100644 go/arrow/datatype_binary_test.go
 delete mode 100644 go/arrow/datatype_fixedwidth.go
 delete mode 100644 go/arrow/datatype_fixedwidth_test.go
 delete mode 100644 go/arrow/datatype_nested.go
 delete mode 100644 go/arrow/datatype_nested_test.go
 delete mode 100644 go/arrow/datatype_null.go
 delete mode 100644 go/arrow/datatype_null_test.go
 delete mode 100644 go/arrow/datatype_numeric.gen.go
 delete mode 100644 go/arrow/datatype_numeric.gen.go.tmpl
 delete mode 100644 go/arrow/datatype_numeric.gen.go.tmpldata
 delete mode 100644 go/arrow/decimal128/decimal128.go
 delete mode 100644 go/arrow/decimal128/decimal128_test.go
 delete mode 100644 go/arrow/doc.go
 delete mode 100644 go/arrow/endian/big.go
 delete mode 100644 go/arrow/endian/little.go
 delete mode 100644 go/arrow/example_test.go
 delete mode 100644 go/arrow/flight/Flight.pb.go
 delete mode 100644 go/arrow/flight/Flight_grpc.pb.go
 delete mode 100644 go/arrow/flight/basic_auth_flight_test.go
 delete mode 100644 go/arrow/flight/client.go
 delete mode 100644 go/arrow/flight/client_auth.go
 delete mode 100644 go/arrow/flight/example_flight_server_test.go
 delete mode 100644 go/arrow/flight/flight_test.go
 delete mode 100644 go/arrow/flight/gen.go
 delete mode 100644 go/arrow/flight/record_batch_reader.go
 delete mode 100644 go/arrow/flight/record_batch_writer.go
 delete mode 100644 go/arrow/flight/server.go
 delete mode 100644 go/arrow/flight/server_auth.go
 delete mode 100644 go/arrow/float16/float16.go
 delete mode 100644 go/arrow/float16/float16_test.go
 delete mode 100644 go/arrow/gen-flatbuffers.go
 delete mode 100644 go/arrow/go.mod
 delete mode 100644 go/arrow/go.sum
 delete mode 100644 go/arrow/internal/arrdata/arrdata.go
 delete mode 100644 go/arrow/internal/arrdata/ioutil.go
 delete mode 100644 go/arrow/internal/arrjson/arrjson.go
 delete mode 100644 go/arrow/internal/arrjson/arrjson_test.go
 delete mode 100644 go/arrow/internal/arrjson/option.go
 delete mode 100644 go/arrow/internal/arrjson/reader.go
 delete mode 100644 go/arrow/internal/arrjson/writer.go
 delete mode 100644 go/arrow/internal/cpu/README.md
 delete mode 100644 go/arrow/internal/cpu/cpu.go
 delete mode 100644 go/arrow/internal/cpu/cpu_s390x.go
 delete mode 100644 go/arrow/internal/cpu/cpu_test.go
 delete mode 100644 go/arrow/internal/cpu/cpu_x86.go
 delete mode 100644 go/arrow/internal/cpu/cpu_x86.s
 delete mode 100644 go/arrow/internal/debug/assert_off.go
 delete mode 100644 go/arrow/internal/debug/assert_on.go
 delete mode 100644 go/arrow/internal/debug/doc.go
 delete mode 100644 go/arrow/internal/debug/log_off.go
 delete mode 100644 go/arrow/internal/debug/log_on.go
 delete mode 100644 go/arrow/internal/debug/util.go
 delete mode 100644 go/arrow/internal/flatbuf/Binary.go
 delete mode 100644 go/arrow/internal/flatbuf/Block.go
 delete mode 100644 go/arrow/internal/flatbuf/BodyCompression.go
 delete mode 100644 go/arrow/internal/flatbuf/BodyCompressionMethod.go
 delete mode 100644 go/arrow/internal/flatbuf/Bool.go
 delete mode 100644 go/arrow/internal/flatbuf/Buffer.go
 delete mode 100644 go/arrow/internal/flatbuf/CompressionType.go
 delete mode 100644 go/arrow/internal/flatbuf/Date.go
 delete mode 100644 go/arrow/internal/flatbuf/DateUnit.go
 delete mode 100644 go/arrow/internal/flatbuf/Decimal.go
 delete mode 100644 go/arrow/internal/flatbuf/DictionaryBatch.go
 delete mode 100644 go/arrow/internal/flatbuf/DictionaryEncoding.go
 delete mode 100644 go/arrow/internal/flatbuf/DictionaryKind.go
 delete mode 100644 go/arrow/internal/flatbuf/Duration.go
 delete mode 100644 go/arrow/internal/flatbuf/Endianness.go
 delete mode 100644 go/arrow/internal/flatbuf/Feature.go
 delete mode 100644 go/arrow/internal/flatbuf/Field.go
 delete mode 100644 go/arrow/internal/flatbuf/FieldNode.go
 delete mode 100644 go/arrow/internal/flatbuf/FixedSizeBinary.go
 delete mode 100644 go/arrow/internal/flatbuf/FixedSizeList.go
 delete mode 100644 go/arrow/internal/flatbuf/FloatingPoint.go
 delete mode 100644 go/arrow/internal/flatbuf/Footer.go
 delete mode 100644 go/arrow/internal/flatbuf/Int.go
 delete mode 100644 go/arrow/internal/flatbuf/Interval.go
 delete mode 100644 go/arrow/internal/flatbuf/IntervalUnit.go
 delete mode 100644 go/arrow/internal/flatbuf/KeyValue.go
 delete mode 100644 go/arrow/internal/flatbuf/LargeBinary.go
 delete mode 100644 go/arrow/internal/flatbuf/LargeList.go
 delete mode 100644 go/arrow/internal/flatbuf/LargeUtf8.go
 delete mode 100644 go/arrow/internal/flatbuf/List.go
 delete mode 100644 go/arrow/internal/flatbuf/Map.go
 delete mode 100644 go/arrow/internal/flatbuf/Message.go
 delete mode 100644 go/arrow/internal/flatbuf/MessageHeader.go
 delete mode 100644 go/arrow/internal/flatbuf/MetadataVersion.go
 delete mode 100644 go/arrow/internal/flatbuf/Null.go
 delete mode 100644 go/arrow/internal/flatbuf/Precision.go
 delete mode 100644 go/arrow/internal/flatbuf/RecordBatch.go
 delete mode 100644 go/arrow/internal/flatbuf/Schema.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseMatrixCompressedAxis.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseMatrixIndexCSR.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseMatrixIndexCSX.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseTensor.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseTensorIndex.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseTensorIndexCOO.go
 delete mode 100644 go/arrow/internal/flatbuf/SparseTensorIndexCSF.go
 delete mode 100644 go/arrow/internal/flatbuf/Struct_.go
 delete mode 100644 go/arrow/internal/flatbuf/Tensor.go
 delete mode 100644 go/arrow/internal/flatbuf/TensorDim.go
 delete mode 100644 go/arrow/internal/flatbuf/Time.go
 delete mode 100644 go/arrow/internal/flatbuf/TimeUnit.go
 delete mode 100644 go/arrow/internal/flatbuf/Timestamp.go
 delete mode 100644 go/arrow/internal/flatbuf/Type.go
 delete mode 100644 go/arrow/internal/flatbuf/Union.go
 delete mode 100644 go/arrow/internal/flatbuf/UnionMode.go
 delete mode 100644 go/arrow/internal/flatbuf/Utf8.go
 delete mode 100644 go/arrow/internal/testing/tools/bits.go
 delete mode 100644 go/arrow/internal/testing/tools/bits_test.go
 delete mode 100644 go/arrow/internal/testing/tools/bool.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-cat/main.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-cat/main_test.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-file-to-stream/main.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-json-integration-test/main.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-json-integration-test/main_test.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-ls/main.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-ls/main_test.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-stream-to-file/main.go
 delete mode 100644 go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go
 delete mode 100644 go/arrow/ipc/compression.go
 delete mode 100644 go/arrow/ipc/dict.go
 delete mode 100644 go/arrow/ipc/dict_test.go
 delete mode 100644 go/arrow/ipc/file_reader.go
 delete mode 100644 go/arrow/ipc/file_test.go
 delete mode 100644 go/arrow/ipc/file_writer.go
 delete mode 100644 go/arrow/ipc/ipc.go
 delete mode 100644 go/arrow/ipc/message.go
 delete mode 100644 go/arrow/ipc/metadata.go
 delete mode 100644 go/arrow/ipc/metadata_test.go
 delete mode 100644 go/arrow/ipc/reader.go
 delete mode 100644 go/arrow/ipc/stream_test.go
 delete mode 100644 go/arrow/ipc/writer.go
 delete mode 100644 go/arrow/math/Makefile
 delete mode 100644 go/arrow/math/_lib/.gitignore
 delete mode 100644 go/arrow/math/_lib/CMakeLists.txt
 delete mode 100644 go/arrow/math/_lib/arch.h
 delete mode 100644 go/arrow/math/_lib/float64.c
 delete mode 100644 go/arrow/math/_lib/float64_avx2.s
 delete mode 100644 go/arrow/math/_lib/float64_sse4.s
 delete mode 100644 go/arrow/math/_lib/int64.c
 delete mode 100644 go/arrow/math/_lib/int64_avx2.s
 delete mode 100644 go/arrow/math/_lib/int64_sse4.s
 delete mode 100644 go/arrow/math/_lib/uint64.c
 delete mode 100644 go/arrow/math/_lib/uint64_avx2.s
 delete mode 100644 go/arrow/math/_lib/uint64_sse4.s
 delete mode 100644 go/arrow/math/doc.go
 delete mode 100644 go/arrow/math/float64.go
 delete mode 100644 go/arrow/math/float64.tmpldata
 delete mode 100644 go/arrow/math/float64_amd64.go
 delete mode 100644 go/arrow/math/float64_avx2_amd64.go
 delete mode 100644 go/arrow/math/float64_avx2_amd64.s
 delete mode 100644 go/arrow/math/float64_noasm.go
 delete mode 100644 go/arrow/math/float64_s390x.go
 delete mode 100644 go/arrow/math/float64_sse4_amd64.go
 delete mode 100644 go/arrow/math/float64_sse4_amd64.s
 delete mode 100644 go/arrow/math/float64_test.go
 delete mode 100644 go/arrow/math/int64.go
 delete mode 100644 go/arrow/math/int64.tmpldata
 delete mode 100644 go/arrow/math/int64_amd64.go
 delete mode 100644 go/arrow/math/int64_avx2_amd64.go
 delete mode 100644 go/arrow/math/int64_avx2_amd64.s
 delete mode 100644 go/arrow/math/int64_noasm.go
 delete mode 100644 go/arrow/math/int64_s390x.go
 delete mode 100644 go/arrow/math/int64_sse4_amd64.go
 delete mode 100644 go/arrow/math/int64_sse4_amd64.s
 delete mode 100644 go/arrow/math/int64_test.go
 delete mode 100644 go/arrow/math/math_amd64.go
 delete mode 100644 go/arrow/math/math_noasm.go
 delete mode 100644 go/arrow/math/math_s390x.go
 delete mode 100644 go/arrow/math/type.go.tmpl
 delete mode 100644 go/arrow/math/type_amd64.go.tmpl
 delete mode 100644 go/arrow/math/type_noasm.go.tmpl
 delete mode 100644 go/arrow/math/type_s390x.go.tmpl
 delete mode 100644 go/arrow/math/type_simd_amd64.go.tmpl
 delete mode 100644 go/arrow/math/type_test.go.tmpl
 delete mode 100644 go/arrow/math/uint64.go
 delete mode 100644 go/arrow/math/uint64.tmpldata
 delete mode 100644 go/arrow/math/uint64_amd64.go
 delete mode 100644 go/arrow/math/uint64_avx2_amd64.go
 delete mode 100644 go/arrow/math/uint64_avx2_amd64.s
 delete mode 100644 go/arrow/math/uint64_noasm.go
 delete mode 100644 go/arrow/math/uint64_s390x.go
 delete mode 100644 go/arrow/math/uint64_sse4_amd64.go
 delete mode 100644 go/arrow/math/uint64_sse4_amd64.s
 delete mode 100644 go/arrow/math/uint64_test.go
 delete mode 100644 go/arrow/memory/Makefile
 delete mode 100644 go/arrow/memory/_lib/.gitignore
 delete mode 100644 go/arrow/memory/_lib/CMakeLists.txt
 delete mode 100644 go/arrow/memory/_lib/arch.h
 delete mode 100644 go/arrow/memory/_lib/memory.c
 delete mode 100644 go/arrow/memory/_lib/memory_avx2.s
 delete mode 100644 go/arrow/memory/_lib/memory_sse4.s
 delete mode 100644 go/arrow/memory/allocator.go
 delete mode 100644 go/arrow/memory/buffer.go
 delete mode 100644 go/arrow/memory/buffer_test.go
 delete mode 100644 go/arrow/memory/checked_allocator.go
 delete mode 100644 go/arrow/memory/doc.go
 delete mode 100644 go/arrow/memory/go_allocator.go
 delete mode 100644 go/arrow/memory/go_allocator_test.go
 delete mode 100644 go/arrow/memory/memory.go
 delete mode 100644 go/arrow/memory/memory_amd64.go
 delete mode 100644 go/arrow/memory/memory_avx2_amd64.go
 delete mode 100644 go/arrow/memory/memory_avx2_amd64.s
 delete mode 100644 go/arrow/memory/memory_js_wasm.go
 delete mode 100644 go/arrow/memory/memory_noasm.go
 delete mode 100644 go/arrow/memory/memory_sse4_amd64.go
 delete mode 100644 go/arrow/memory/memory_sse4_amd64.s
 delete mode 100644 go/arrow/memory/memory_test.go
 delete mode 100644 go/arrow/memory/util.go
 delete mode 100644 go/arrow/memory/util_test.go
 delete mode 100644 go/arrow/numeric.schema.json
 delete mode 100644 go/arrow/numeric.tmpldata
 delete mode 100644 go/arrow/schema.go
 delete mode 100644 go/arrow/schema_test.go
 delete mode 100644 go/arrow/tensor/numeric.gen.go
 delete mode 100644 go/arrow/tensor/numeric.gen.go.tmpl
 delete mode 100644 go/arrow/tensor/numeric.gen_test.go
 delete mode 100644 go/arrow/tensor/numeric.gen_test.go.tmpl
 delete mode 100644 go/arrow/tensor/tensor.go
 delete mode 100644 go/arrow/tensor/tensor_test.go
 delete mode 100644 go/arrow/type_string.go
 delete mode 100644 go/arrow/type_traits_boolean.go
 delete mode 100644 go/arrow/type_traits_decimal128.go
 delete mode 100644 go/arrow/type_traits_float16.go
 delete mode 100644 go/arrow/type_traits_interval.go
 delete mode 100644 go/arrow/type_traits_numeric.gen.go
 delete mode 100644 go/arrow/type_traits_numeric.gen.go.tmpl
 delete mode 100644 go/arrow/type_traits_numeric.gen_test.go
 delete mode 100644 go/arrow/type_traits_numeric.gen_test.go.tmpl
 delete mode 100644 go/arrow/type_traits_test.go
 delete mode 100644 go/parquet/.gitignore
 delete mode 100644 go/parquet/LICENSE.txt
 delete mode 100644 go/parquet/compress/brotli.go
 delete mode 100644 go/parquet/compress/compress.go
 delete mode 100644 go/parquet/compress/compress_test.go
 delete mode 100644 go/parquet/compress/gzip.go
 delete mode 100644 go/parquet/compress/snappy.go
 delete mode 100644 go/parquet/compress/zstd.go
 delete mode 100644 go/parquet/doc.go
 delete mode 100644 go/parquet/encryption_properties.go
 delete mode 100644 go/parquet/encryption_properties_test.go
 delete mode 100644 go/parquet/go.mod
 delete mode 100644 go/parquet/go.sum
 delete mode 100644 go/parquet/internal/bmi/Makefile
 delete mode 100644 go/parquet/internal/bmi/_lib/bitmap_bmi2.c
 delete mode 100644 go/parquet/internal/bmi/_lib/bitmap_bmi2.s
 delete mode 100644 go/parquet/internal/bmi/bitmap_bmi2.go
 delete mode 100644 go/parquet/internal/bmi/bitmap_bmi2.s
 delete mode 100644 go/parquet/internal/bmi/bmi_init.go
 delete mode 100644 go/parquet/internal/bmi/bmi_noasm.go
 delete mode 100644 go/parquet/internal/debug/assert_off.go
 delete mode 100644 go/parquet/internal/debug/assert_on.go
 delete mode 100644 go/parquet/internal/debug/doc.go
 delete mode 100644 go/parquet/internal/encryption/aes.go
 delete mode 100644 go/parquet/internal/encryption/decryptor.go
 delete mode 100644 go/parquet/internal/encryption/encryptor.go
 delete mode 100644 go/parquet/internal/encryption/key_handling.go
 delete mode 100644 go/parquet/internal/gen-go/parquet/GoUnusedProtection__.go
 delete mode 100644 go/parquet/internal/gen-go/parquet/parquet-consts.go
 delete mode 100644 go/parquet/internal/gen-go/parquet/parquet.go
 delete mode 100644 go/parquet/internal/gen-go/parquet/staticcheck.conf
 delete mode 100644 go/parquet/internal/testutils/random.go
 delete mode 100644 go/parquet/internal/testutils/random_arrow.go
 delete mode 100644 go/parquet/internal/thrift/helpers.go
 delete mode 100644 go/parquet/internal/utils/Makefile
 delete mode 100644 go/parquet/internal/utils/_lib/arch.h
 delete mode 100644 go/parquet/internal/utils/_lib/bit_packing_avx2.c
 delete mode 100644 go/parquet/internal/utils/_lib/bit_packing_avx2.s
 delete mode 100644 go/parquet/internal/utils/_lib/min_max.c
 delete mode 100644 go/parquet/internal/utils/_lib/min_max_avx2.s
 delete mode 100644 go/parquet/internal/utils/_lib/min_max_sse4.s
 delete mode 100644 go/parquet/internal/utils/_lib/unpack_bool.c
 delete mode 100644 go/parquet/internal/utils/_lib/unpack_bool_avx2.s
 delete mode 100644 go/parquet/internal/utils/_lib/unpack_bool_sse4.s
 delete mode 100644 go/parquet/internal/utils/bit_benchmark_test.go
 delete mode 100644 go/parquet/internal/utils/bit_block_counter.go
 delete mode 100644 go/parquet/internal/utils/bit_block_counter_test.go
 delete mode 100644 go/parquet/internal/utils/bit_packing.go
 delete mode 100644 go/parquet/internal/utils/bit_packing_avx2.go
 delete mode 100644 go/parquet/internal/utils/bit_packing_avx2.s
 delete mode 100644 go/parquet/internal/utils/bit_packing_default.go
 delete mode 100644 go/parquet/internal/utils/bit_packing_noasm.go
 delete mode 100644 go/parquet/internal/utils/bit_reader.go
 delete mode 100644 go/parquet/internal/utils/bit_reader_test.go
 delete mode 100644 go/parquet/internal/utils/bit_run_reader.go
 delete mode 100644 go/parquet/internal/utils/bit_run_reader_test.go
 delete mode 100644 go/parquet/internal/utils/bit_set_run_reader.go
 delete mode 100644 go/parquet/internal/utils/bit_set_run_reader_test.go
 delete mode 100644 go/parquet/internal/utils/bit_writer.go
 delete mode 100644 go/parquet/internal/utils/bitmap_reader.go
 delete mode 100644 go/parquet/internal/utils/bitmap_reader_test.go
 delete mode 100644 go/parquet/internal/utils/bitmap_writer.go
 delete mode 100644 go/parquet/internal/utils/bitmap_writer_test.go
 delete mode 100644 go/parquet/internal/utils/clib_amd64.s
 delete mode 100644 go/parquet/internal/utils/dictionary.go
 delete mode 100644 go/parquet/internal/utils/math.go
 delete mode 100644 go/parquet/internal/utils/min_max.go
 delete mode 100644 go/parquet/internal/utils/min_max_amd64.go
 delete mode 100644 go/parquet/internal/utils/min_max_avx2.go
 delete mode 100644 go/parquet/internal/utils/min_max_avx2.s
 delete mode 100644 go/parquet/internal/utils/min_max_noasm.go
 delete mode 100644 go/parquet/internal/utils/min_max_sse4.go
 delete mode 100644 go/parquet/internal/utils/min_max_sse4.s
 delete mode 100644 go/parquet/internal/utils/physical_types.tmpldata
 delete mode 100644 go/parquet/internal/utils/rle.go
 delete mode 100644 go/parquet/internal/utils/typed_rle_dict.gen.go
 delete mode 100644 go/parquet/internal/utils/typed_rle_dict.gen.go.tmpl
 delete mode 100644 go/parquet/internal/utils/unpack_bool.go
 delete mode 100644 go/parquet/internal/utils/unpack_bool_amd64.go
 delete mode 100644 go/parquet/internal/utils/unpack_bool_avx2.go
 delete mode 100644 go/parquet/internal/utils/unpack_bool_avx2.s
 delete mode 100644 go/parquet/internal/utils/unpack_bool_noasm.go
 delete mode 100644 go/parquet/internal/utils/unpack_bool_sse4.go
 delete mode 100644 go/parquet/internal/utils/unpack_bool_sse4.s
 delete mode 100644 go/parquet/internal/utils/write_utils.go
 delete mode 100644 go/parquet/reader_properties.go
 delete mode 100644 go/parquet/reader_writer_properties_test.go
 delete mode 100644 go/parquet/tools.go
 delete mode 100644 go/parquet/types.go
 delete mode 100644 go/parquet/writer_properties.go
 delete mode 100644 java/.gitattributes
 delete mode 100644 java/.gitignore
 delete mode 100644 java/README.md
 delete mode 100644 java/adapter/avro/pom.xml
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrow.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfig.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfigBuilder.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowUtils.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowVectorIterator.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroArraysConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBooleanConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBytesConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroDoubleConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroEnumConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFixedConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFloatConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroIntConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroLongConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroMapConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroNullConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStringConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStructConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroUnionsConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/BaseAvroConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/CompositeAvroConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/Consumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipFunction.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDateConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDecimalConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMicroConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMillisConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMicrosConsumer.java
 delete mode 100644 java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMillisConsumer.java
 delete mode 100644 java/adapter/avro/src/test/java/org/apache/arrow/AvroLogicalTypesTest.java
 delete mode 100644 java/adapter/avro/src/test/java/org/apache/arrow/AvroSkipFieldTest.java
 delete mode 100644 java/adapter/avro/src/test/java/org/apache/arrow/AvroTestBase.java
 delete mode 100644 java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowIteratorTest.java
 delete mode 100644 java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowTest.java
 delete mode 100644 java/adapter/avro/src/test/java/org/apache/arrow/TestWriteReadAvroRecord.java
 delete mode 100644 java/adapter/avro/src/test/resources/schema/attrs/test_enum_attrs.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/attrs/test_fixed_attr.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/attrs/test_record_attrs.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_date.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid1.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid2.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid3.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid4.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_bytes.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_fixed.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_time_micros.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_time_millis.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_timestamp_micros.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/logical/test_timestamp_millis.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_array_before.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_array_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_base1.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_base2.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_boolean_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_bytes_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_double_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_enum_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_fixed_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_float_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_int_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_long_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_map_before.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_map_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_multi_fields_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_record_before.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_record_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_second_level_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_single_field_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_string_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_third_level_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_union_before.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_union_multi_fields_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_union_nullable_field_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/skip/test_skip_union_one_field_expected.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_array.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_fixed.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_large_data.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_map.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nested_record.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_boolean.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_bytes.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_double.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_float.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_int.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_long.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_string.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_nullable_union.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_boolean.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_bytes.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_double.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_enum.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_float.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_int.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_long.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_primitive_string.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_record.avsc
 delete mode 100644 java/adapter/avro/src/test/resources/schema/test_union.avsc
 delete mode 100644 java/adapter/jdbc/pom.xml
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/ArrayConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BaseConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BigIntConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BinaryConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BitConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BlobConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/ClobConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/CompositeJdbcConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/DateConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/DecimalConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/DoubleConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/FloatConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/IntConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/JdbcConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/NullConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/SmallIntConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/TimeConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/TimestampConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/TimestampTZConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/TinyIntConsumer.java
 delete mode 100644 java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/VarCharConsumer.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/AbstractJdbcToArrowTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfoTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcToArrowTestHelper.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/Table.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/consumer/AbstractConsumerTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/consumer/BinaryConsumerTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowArrayTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowCharSetTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowDataTypesTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowNullTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTimeZoneTest.java
 delete mode 100644 java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowVectorIteratorTest.java
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_null_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_selected_null_rows_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_bigint_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_binary_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_bit_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_blob_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_bool_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_char_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_charset_ch_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_charset_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_charset_jp_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_charset_kr_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_clob_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_date_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_decimal_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_double_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_est_date_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_est_time_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_est_timestamp_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_gmt_date_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_gmt_time_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_gmt_timestamp_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_int_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_null_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_pst_date_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_pst_time_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_pst_timestamp_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_real_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_selected_datatypes_null_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_smallint_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_time_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_timestamp_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_tinyint_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/h2/test1_varchar_h2.yml
 delete mode 100644 java/adapter/jdbc/src/test/resources/logback.xml
 delete mode 100644 java/adapter/orc/CMakeLists.txt
 delete mode 100644 java/adapter/orc/pom.xml
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcFieldNode.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcJniUtils.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcMemoryJniWrapper.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcReader.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcReaderJniWrapper.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcRecordBatch.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcReferenceManager.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcStripeReader.java
 delete mode 100644 java/adapter/orc/src/main/java/org/apache/arrow/adapter/orc/OrcStripeReaderJniWrapper.java
 delete mode 100644 java/adapter/orc/src/test/java/org/apache/arrow/adapter/orc/OrcReaderTest.java
 delete mode 100644 java/algorithm/pom.xml
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/deduplicate/DeduplicationUtils.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/deduplicate/VectorRunDeduplicator.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/DictionaryBuilder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/DictionaryEncoder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/HashTableBasedDictionaryBuilder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/HashTableDictionaryEncoder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/LinearDictionaryEncoder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/SearchDictionaryEncoder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/dictionary/SearchTreeBasedDictionaryBuilder.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/misc/PartialSumUtils.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/rank/VectorRank.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/search/ParallelSearcher.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/search/VectorRangeSearcher.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/search/VectorSearcher.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/CompositeVectorComparator.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/DefaultVectorComparators.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/FixedWidthInPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/FixedWidthOutOfPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/InPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/IndexSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/InsertionSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/OffHeapIntStack.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/OutOfPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/StableVectorComparator.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/VariableWidthOutOfPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/VectorValueComparator.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/deduplicate/TestDeduplicationUtils.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/deduplicate/TestVectorRunDeduplicator.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/dictionary/TestHashTableBasedDictionaryBuilder.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/dictionary/TestHashTableDictionaryEncoder.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/dictionary/TestLinearDictionaryEncoder.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/dictionary/TestSearchDictionaryEncoder.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/dictionary/TestSearchTreeBasedDictionaryBuilder.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/misc/TestPartialSumUtils.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/rank/TestVectorRank.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/search/TestParallelSearcher.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/search/TestVectorRangeSearcher.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/search/TestVectorSearcher.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestCompositeVectorComparator.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestDefaultVectorComparator.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestFixedWidthInPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestFixedWidthOutOfPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestFixedWidthSorting.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestIndexSorter.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestInsertionSorter.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestOffHeapIntStack.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestSortingUtil.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestStableVectorComparator.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestVariableWidthOutOfPlaceVectorSorter.java
 delete mode 100644 java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestVariableWidthSorting.java
 delete mode 100644 java/api-changes.md
 delete mode 100644 java/compression/pom.xml
 delete mode 100644 java/compression/src/main/java/org/apache/arrow/compression/CommonsCompressionFactory.java
 delete mode 100644 java/compression/src/main/java/org/apache/arrow/compression/Lz4CompressionCodec.java
 delete mode 100644 java/compression/src/main/java/org/apache/arrow/compression/ZstdCompressionCodec.java
 delete mode 100644 java/compression/src/test/java/org/apache/arrow/compression/TestCompressionCodec.java
 delete mode 100644 java/dataset/CMakeLists.txt
 delete mode 100644 java/dataset/pom.xml
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/file/FileFormat.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/file/FileSystemDatasetFactory.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/file/JniWrapper.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/DirectReservationListener.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/JniLoader.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/JniWrapper.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeContext.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeDataset.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeDatasetFactory.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeInstanceReleasedException.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeMemoryPool.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeRecordBatchHandle.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeScanTask.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/NativeScanner.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/jni/ReservationListener.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/scanner/ScanOptions.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/scanner/ScanTask.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/scanner/Scanner.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/source/Dataset.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/dataset/source/DatasetFactory.java
 delete mode 100644 java/dataset/src/main/java/org/apache/arrow/memory/NativeUnderlyingMemory.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/dataset/ParquetWriteSupport.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/dataset/TestDataset.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/dataset/file/TestFileSystemDataset.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/dataset/file/TestFileSystemDatasetFactory.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/dataset/jni/TestNativeDataset.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/dataset/jni/TestReservationListener.java
 delete mode 100644 java/dataset/src/test/java/org/apache/arrow/memory/TestNativeUnderlyingMemory.java
 delete mode 100644 java/dataset/src/test/resources/avroschema/user.avsc
 delete mode 100644 java/dev/checkstyle/checkstyle.license
 delete mode 100644 java/dev/checkstyle/checkstyle.xml
 delete mode 100644 java/dev/checkstyle/suppressions.xml
 delete mode 100644 java/flight/flight-core/README.md
 delete mode 100644 java/flight/flight-core/pom.xml
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/Action.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/ActionType.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/ArrowMessage.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/AsyncPutListener.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/BackpressureStrategy.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/CallHeaders.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/CallInfo.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/CallOption.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/CallOptions.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/CallStatus.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/Criteria.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/DictionaryUtils.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/ErrorFlightMetadata.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightBindingService.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightCallHeaders.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightClient.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightClientMiddleware.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightConstants.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightDescriptor.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightEndpoint.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightInfo.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightMethod.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightProducer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightRuntimeException.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightServer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightServerMiddleware.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightService.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightStatusCode.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightStream.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/HeaderCallOption.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/Location.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/LocationSchemes.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/NoOpFlightProducer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/NoOpStreamListener.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/OutboundStreamListener.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/OutboundStreamListenerImpl.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/PutResult.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/RequestContext.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/Result.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/SchemaResult.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/ServerHeaderMiddleware.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/StreamPipe.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/SyncPutListener.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/Ticket.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/AuthConstants.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/BasicClientAuthHandler.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/BasicServerAuthHandler.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/ClientAuthHandler.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/ClientAuthInterceptor.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/ClientAuthWrapper.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/ServerAuthHandler.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/ServerAuthInterceptor.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth/ServerAuthWrapper.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/Auth2Constants.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/AuthUtilities.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/BasicAuthCredentialWriter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/BasicCallHeaderAuthenticator.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/BearerCredentialWriter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/BearerTokenAuthenticator.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/CallHeaderAuthenticator.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientBearerHeaderHandler.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientHandshakeWrapper.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientHeaderHandler.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientIncomingAuthHeaderMiddleware.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/GeneratedBearerTokenAuthenticator.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ServerCallHeaderAuthMiddleware.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/client/ClientCookieMiddleware.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/ExampleFlightServer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/ExampleTicket.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/FlightHolder.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/InMemoryStore.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/Stream.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/AuthBasicProtoScenario.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/IntegrationAssertions.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/IntegrationTestClient.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/IntegrationTestServer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/MiddlewareScenario.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/Scenario.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/example/integration/Scenarios.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/AddWritableBuffer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/CallCredentialAdapter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/ClientInterceptorAdapter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/ContextPropagatingExecutorService.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/CredentialCallOption.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/GetReadableBuffer.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/MetadataAdapter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/RequestContextAdapter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/ServerInterceptorAdapter.java
 delete mode 100644 java/flight/flight-core/src/main/java/org/apache/arrow/flight/grpc/StatusUtils.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/FlightTestUtil.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestApplicationMetadata.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestAuth.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBackPressure.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBasicOperation.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestCallOptions.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestClientMiddleware.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestDictionaryUtils.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestDoExchange.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestErrorMetadata.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestFlightClient.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestFlightService.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestLargeMessage.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestLeak.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestMetadataVersion.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestServerMiddleware.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestServerOptions.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestTls.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth/TestBasicAuth.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth2/TestBasicAuth2.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/client/TestCookieHandling.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/example/TestExampleServer.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/perf/PerformanceTestServer.java
 delete mode 100644 java/flight/flight-core/src/test/java/org/apache/arrow/flight/perf/TestPerf.java
 delete mode 100644 java/flight/flight-core/src/test/protobuf/perf.proto
 delete mode 100644 java/flight/flight-core/src/test/resources/logback.xml
 delete mode 100644 java/flight/flight-grpc/pom.xml
 delete mode 100644 java/flight/flight-grpc/src/main/java/org/apache/arrow/flight/FlightGrpcUtils.java
 delete mode 100644 java/flight/flight-grpc/src/test/java/org/apache/arrow/flight/TestFlightGrpcUtils.java
 delete mode 100644 java/flight/flight-grpc/src/test/protobuf/test.proto
 delete mode 100644 java/format/pom.xml
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Binary.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Block.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/BodyCompression.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/BodyCompressionMethod.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Bool.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Buffer.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/CompressionType.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Date.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/DateUnit.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Decimal.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/DictionaryBatch.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/DictionaryEncoding.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/DictionaryKind.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Duration.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Endianness.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Feature.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Field.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/FieldNode.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/FixedSizeBinary.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/FixedSizeList.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/FloatingPoint.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Footer.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Int.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Interval.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/IntervalUnit.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/KeyValue.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/LargeBinary.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/LargeList.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/LargeUtf8.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/List.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Map.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Message.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/MessageHeader.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/MetadataVersion.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Null.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Precision.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/RecordBatch.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Schema.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/SparseMatrixCompressedAxis.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/SparseMatrixIndexCSX.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/SparseTensor.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/SparseTensorIndex.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/SparseTensorIndexCOO.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/SparseTensorIndexCSF.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Struct_.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Tensor.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/TensorDim.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Time.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/TimeUnit.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Timestamp.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Type.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Union.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/UnionMode.java
 delete mode 100644 java/format/src/main/java/org/apache/arrow/flatbuf/Utf8.java
 delete mode 100644 java/gandiva/CMakeLists.txt
 delete mode 100644 java/gandiva/README.md
 delete mode 100644 java/gandiva/pom.xml
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/ConfigurationBuilder.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/DecimalTypeUtil.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/ExpressionRegistry.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/ExpressionRegistryJniHelper.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/Filter.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/FunctionSignature.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/JniLoader.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/JniWrapper.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/Projector.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/SelectionVector.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/SelectionVectorInt16.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/SelectionVectorInt32.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/evaluator/VectorExpander.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/exceptions/EvaluatorClosedException.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/exceptions/GandivaException.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/exceptions/UnsupportedTypeException.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/AndNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/ArrowTypeHelper.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/BinaryNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/BooleanNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/Condition.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/DecimalNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/DoubleNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/ExpressionTree.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/FieldNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/FloatNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/FunctionNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/IfNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/InNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/IntNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/LongNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/NullNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/OrNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/StringNode.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/TreeBuilder.java
 delete mode 100644 java/gandiva/src/main/java/org/apache/arrow/gandiva/expression/TreeNode.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/BaseEvaluatorTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/DecimalTypeUtilTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/ExpressionRegistryTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/FilterProjectTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/FilterTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/MicroBenchmarkTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/ProjectorDecimalTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/ProjectorTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/evaluator/TestJniLoader.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/expression/ArrowTypeHelperTest.java
 delete mode 100644 java/gandiva/src/test/java/org/apache/arrow/gandiva/expression/TreeBuilderTest.java
 delete mode 100644 java/gandiva/src/test/resources/logback.xml
 delete mode 100644 java/memory/memory-core/pom.xml
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/Accountant.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/AllocationListener.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/AllocationManager.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/AllocationOutcome.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/AllocationOutcomeDetails.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/AllocationReservation.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/AllocatorClosedException.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/ArrowBuf.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/BaseAllocator.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/BoundsChecking.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/BufferAllocator.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/BufferLedger.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/BufferManager.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/CheckAllocator.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/ChildAllocator.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/DefaultAllocationManagerOption.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/LowCostIdentityHashMap.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/OutOfMemoryException.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/OwnershipTransferNOOP.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/OwnershipTransferResult.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/README.md
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/ReferenceManager.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/RootAllocator.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/ValueWithKeyIncluded.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/package-info.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/rounding/DefaultRoundingPolicy.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/rounding/RoundingPolicy.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/rounding/SegmentRoundingPolicy.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/ArrowBufPointer.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/AssertionUtil.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/AutoCloseableLock.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/ByteFunctionHelpers.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/CommonUtil.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/HistoricalLog.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/LargeMemoryUtil.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/MemoryUtil.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/StackTrace.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/hash/ArrowBufHasher.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/hash/MurmurHasher.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/memory/util/hash/SimpleHasher.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/util/AutoCloseables.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/util/Collections2.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/util/Preconditions.java
 delete mode 100644 java/memory/memory-core/src/main/java/org/apache/arrow/util/VisibleForTesting.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/DefaultAllocationManagerFactory.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/TestAccountant.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/TestAllocationManager.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/TestArrowBuf.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/TestBoundaryChecking.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/TestLowCostIdentityHashMap.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/util/TestArrowBufPointer.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/util/TestByteFunctionHelpers.java
 delete mode 100755 java/memory/memory-core/src/test/java/org/apache/arrow/memory/util/TestLargeMemoryUtil.java
 delete mode 100644 java/memory/memory-core/src/test/java/org/apache/arrow/memory/util/hash/TestArrowBufHasher.java
 delete mode 100644 java/memory/memory-netty/pom.xml
 delete mode 100644 java/memory/memory-netty/src/main/java/io/netty/buffer/ExpandableByteBuf.java
 delete mode 100644 java/memory/memory-netty/src/main/java/io/netty/buffer/LargeBuffer.java
 delete mode 100644 java/memory/memory-netty/src/main/java/io/netty/buffer/MutableWrappedByteBuf.java
 delete mode 100644 java/memory/memory-netty/src/main/java/io/netty/buffer/NettyArrowBuf.java
 delete mode 100644 java/memory/memory-netty/src/main/java/io/netty/buffer/PooledByteBufAllocatorL.java
 delete mode 100644 java/memory/memory-netty/src/main/java/io/netty/buffer/UnsafeDirectLittleEndian.java
 delete mode 100644 java/memory/memory-netty/src/main/java/org/apache/arrow/memory/ArrowByteBufAllocator.java
 delete mode 100644 java/memory/memory-netty/src/main/java/org/apache/arrow/memory/DefaultAllocationManagerFactory.java
 delete mode 100644 java/memory/memory-netty/src/main/java/org/apache/arrow/memory/NettyAllocationManager.java
 delete mode 100644 java/memory/memory-netty/src/test/java/io/netty/buffer/TestNettyArrowBuf.java
 delete mode 100644 java/memory/memory-netty/src/test/java/io/netty/buffer/TestUnsafeDirectLittleEndian.java
 delete mode 100644 java/memory/memory-netty/src/test/java/org/apache/arrow/memory/ITTestLargeArrowBuf.java
 delete mode 100644 java/memory/memory-netty/src/test/java/org/apache/arrow/memory/TestAllocationManagerNetty.java
 delete mode 100644 java/memory/memory-netty/src/test/java/org/apache/arrow/memory/TestBaseAllocator.java
 delete mode 100644 java/memory/memory-netty/src/test/java/org/apache/arrow/memory/TestEmptyArrowBuf.java
 delete mode 100644 java/memory/memory-netty/src/test/java/org/apache/arrow/memory/TestEndianness.java
 delete mode 100644 java/memory/memory-netty/src/test/java/org/apache/arrow/memory/TestNettyAllocationManager.java
 delete mode 100644 java/memory/memory-netty/src/test/resources/logback.xml
 delete mode 100644 java/memory/memory-unsafe/pom.xml
 delete mode 100644 java/memory/memory-unsafe/src/main/java/org/apache/arrow/memory/DefaultAllocationManagerFactory.java
 delete mode 100644 java/memory/memory-unsafe/src/main/java/org/apache/arrow/memory/UnsafeAllocationManager.java
 delete mode 100644 java/memory/memory-unsafe/src/test/java/org/apache/arrow/memory/TestAllocationManagerUnsafe.java
 delete mode 100644 java/memory/memory-unsafe/src/test/java/org/apache/arrow/memory/TestUnsafeAllocationManager.java
 delete mode 100644 java/memory/pom.xml
 delete mode 100644 java/memory/src/test/java/io/netty/buffer/TestExpandableByteBuf.java
 delete mode 100644 java/performance/pom.xml
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/adapter/AvroAdapterBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/adapter/jdbc/JdbcAdapterBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/algorithm/search/ParallelSearcherBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/memory/AllocatorBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/memory/ArrowBufBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/memory/util/ArrowBufPointerBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/memory/util/ByteFunctionHelpersBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/BaseValueVectorBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/BitVectorHelperBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/DecimalVectorBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/Float8Benchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/FloatingPointBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/IntBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/VarCharBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/VariableWidthVectorBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/VectorLoaderBenchmark.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/VectorUnloaderBenchmark.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/dictionary/DictionaryEncoderBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/ipc/WriteChannelBenchmark.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/ipc/message/ArrowRecordBatchBenchmarks.java
 delete mode 100644 java/performance/src/test/java/org/apache/arrow/vector/util/TransferPairBenchmarks.java
 delete mode 100644 java/plasma/README.md
 delete mode 100644 java/plasma/pom.xml
 delete mode 100644 java/plasma/src/main/java/org/apache/arrow/plasma/ObjectStoreLink.java
 delete mode 100644 java/plasma/src/main/java/org/apache/arrow/plasma/PlasmaClient.java
 delete mode 100644 java/plasma/src/main/java/org/apache/arrow/plasma/PlasmaClientJNI.java
 delete mode 100644 java/plasma/src/main/java/org/apache/arrow/plasma/exceptions/DuplicateObjectException.java
 delete mode 100644 java/plasma/src/main/java/org/apache/arrow/plasma/exceptions/PlasmaClientException.java
 delete mode 100644 java/plasma/src/main/java/org/apache/arrow/plasma/exceptions/PlasmaOutOfMemoryException.java
 delete mode 100644 java/plasma/src/test/java/org/apache/arrow/plasma/PlasmaClientTest.java
 delete mode 100644 java/plasma/src/test/resources/logback.xml
 delete mode 100755 java/plasma/test.sh
 delete mode 100644 java/pom.xml
 delete mode 100644 java/tools/pom.xml
 delete mode 100644 java/tools/src/main/java/org/apache/arrow/tools/EchoServer.java
 delete mode 100644 java/tools/src/main/java/org/apache/arrow/tools/FileRoundtrip.java
 delete mode 100644 java/tools/src/main/java/org/apache/arrow/tools/FileToStream.java
 delete mode 100644 java/tools/src/main/java/org/apache/arrow/tools/Integration.java
 delete mode 100644 java/tools/src/main/java/org/apache/arrow/tools/StreamToFile.java
 delete mode 100644 java/tools/src/test/java/org/apache/arrow/tools/ArrowFileTestFixtures.java
 delete mode 100644 java/tools/src/test/java/org/apache/arrow/tools/EchoServerTest.java
 delete mode 100644 java/tools/src/test/java/org/apache/arrow/tools/TestFileRoundtrip.java
 delete mode 100644 java/tools/src/test/java/org/apache/arrow/tools/TestIntegration.java
 delete mode 100644 java/tools/src/test/resources/logback.xml
 delete mode 100644 java/vector/pom.xml
 delete mode 100644 java/vector/src/main/codegen/config.fmpp
 delete mode 100644 java/vector/src/main/codegen/data/ArrowTypes.tdd
 delete mode 100644 java/vector/src/main/codegen/data/ValueVectorTypes.tdd
 delete mode 100644 java/vector/src/main/codegen/includes/license.ftl
 delete mode 100644 java/vector/src/main/codegen/includes/vv_imports.ftl
 delete mode 100644 java/vector/src/main/codegen/templates/AbstractFieldReader.java
 delete mode 100644 java/vector/src/main/codegen/templates/AbstractFieldWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/AbstractPromotableFieldWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/ArrowType.java
 delete mode 100644 java/vector/src/main/codegen/templates/BaseReader.java
 delete mode 100644 java/vector/src/main/codegen/templates/BaseWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/CaseSensitiveStructWriters.java
 delete mode 100644 java/vector/src/main/codegen/templates/ComplexCopier.java
 delete mode 100644 java/vector/src/main/codegen/templates/ComplexReaders.java
 delete mode 100644 java/vector/src/main/codegen/templates/ComplexWriters.java
 delete mode 100644 java/vector/src/main/codegen/templates/DenseUnionReader.java
 delete mode 100644 java/vector/src/main/codegen/templates/DenseUnionVector.java
 delete mode 100644 java/vector/src/main/codegen/templates/DenseUnionWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/HolderReaderImpl.java
 delete mode 100644 java/vector/src/main/codegen/templates/NullReader.java
 delete mode 100644 java/vector/src/main/codegen/templates/StructWriters.java
 delete mode 100644 java/vector/src/main/codegen/templates/UnionFixedSizeListWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/UnionListWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/UnionMapWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/UnionReader.java
 delete mode 100644 java/vector/src/main/codegen/templates/UnionVector.java
 delete mode 100644 java/vector/src/main/codegen/templates/UnionWriter.java
 delete mode 100644 java/vector/src/main/codegen/templates/ValueHolders.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/AddOrGetResult.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/AllocationHelper.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BaseFixedWidthVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BaseIntVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BaseLargeVariableWidthVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BaseValueVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BaseVariableWidthVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BigIntVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BitVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BitVectorHelper.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BufferBacked.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/BufferLayout.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/DateDayVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/DateMilliVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/Decimal256Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/DecimalVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/DensityAwareVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/DurationVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ElementAddressableVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ExtensionTypeVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/FieldVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/FixedSizeBinaryVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/FixedWidthVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/Float4Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/Float8Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/FloatingPointVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/GenerateSampleData.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/IntVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/IntervalDayVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/IntervalYearVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/LargeVarBinaryVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/LargeVarCharVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/NullCheckingForGet.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/NullVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/SchemaChangeCallBack.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/SmallIntVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeMicroVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeMilliVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeNanoVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeSecVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampMicroTZVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampMicroVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampMilliTZVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampMilliVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampNanoTZVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampNanoVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampSecTZVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampSecVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TimeStampVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TinyIntVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/TypeLayout.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/UInt1Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/UInt2Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/UInt4Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/UInt8Vector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ValueVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VarBinaryVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VarCharVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VariableWidthVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VectorDefinitionSetter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VectorLoader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VectorSchemaRoot.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/VectorUnloader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ZeroVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/ApproxEqualsVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/Range.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/RangeEqualsVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/TypeEqualsVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/VectorEqualsVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/VectorValueEqualizer.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/VectorVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compare/util/ValueEpsilonEqualizers.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/AbstractContainerVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/AbstractStructVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/BaseListVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/BaseRepeatedValueVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/EmptyValuePopulator.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/FixedSizeListVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/LargeListVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/ListVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/MapVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/NonNullableStructVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/Positionable.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/PromotableVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/RepeatedFixedWidthVectorLike.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/RepeatedValueVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/RepeatedVariableWidthVectorLike.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/StateTool.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/StructVector.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/VectorWithOrdinal.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/AbstractBaseReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/AbstractBaseWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/ComplexWriterImpl.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/NullableStructReaderImpl.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/NullableStructWriterFactory.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/PromotableWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/SingleListReaderImpl.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/SingleStructReaderImpl.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/StructOrListWriterImpl.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/UnionFixedSizeListReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/UnionLargeListReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/UnionListReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/impl/UnionMapReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/reader/FieldReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/complex/writer/FieldWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compression/AbstractCompressionCodec.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compression/CompressionCodec.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compression/CompressionUtil.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/compression/NoCompressionCodec.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/dictionary/Dictionary.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/dictionary/DictionaryEncoder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/dictionary/DictionaryHashTable.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/dictionary/DictionaryProvider.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/dictionary/ListSubfieldEncoder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/dictionary/StructSubfieldEncoder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/holders/ComplexHolder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/holders/DenseUnionHolder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/holders/RepeatedListHolder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/holders/RepeatedStructHolder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/holders/UnionHolder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/holders/ValueHolder.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowFileReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowFileWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowMagic.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowStreamReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowStreamWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ArrowWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/InvalidArrowFileException.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/JsonFileReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/JsonFileWriter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/ReadChannel.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/SeekableReadChannel.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/WriteChannel.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowBlock.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowBodyCompression.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowBuffer.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowDictionaryBatch.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowFieldNode.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowFooter.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowMessage.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/ArrowRecordBatch.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/FBSerializable.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/FBSerializables.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/IpcOption.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/MessageChannelReader.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/MessageMetadataResult.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/MessageResult.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/ipc/message/MessageSerializer.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/DateUnit.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/FloatingPointPrecision.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/IntervalUnit.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/MetadataVersion.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/TimeUnit.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/Types.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/UnionMode.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/pojo/DictionaryEncoding.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/pojo/ExtensionTypeRegistry.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/pojo/Field.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/pojo/FieldType.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/types/pojo/Schema.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/ByteArrayReadableSeekableByteChannel.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/ByteFunctionHelpers.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/CallBack.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/DataSizeRoundingUtil.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/DateUtility.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/DecimalUtility.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/DictionaryUtility.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/ElementAddressableVectorIterator.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/JsonStringArrayList.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/JsonStringHashMap.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/MapWithOrdinal.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/MapWithOrdinalImpl.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/MultiMapWithOrdinal.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/OversizedAllocationException.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/PromotableMultiMapWithOrdinal.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/SchemaChangeRuntimeException.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/SchemaUtility.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/Text.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/TransferPair.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/Validator.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/ValueVectorUtility.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/VectorAppender.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/VectorBatchAppender.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/util/VectorSchemaRootAppender.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/validate/MetadataV4UnionChecker.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/validate/ValidateUtil.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/validate/ValidateVectorBufferVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/validate/ValidateVectorDataVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/validate/ValidateVectorTypeVisitor.java
 delete mode 100644 java/vector/src/main/java/org/apache/arrow/vector/validate/ValidateVectorVisitor.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/util/TestSchemaUtil.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/DirtyRootAllocator.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ITTestLargeVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestBitVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestBitVectorHelper.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestBufferOwnershipTransfer.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestCopyFrom.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestDecimal256Vector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestDecimalVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestDenseUnionVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestDictionaryVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestDurationVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestFixedSizeBinaryVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestFixedSizeListVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestIntervalYearVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestLargeListVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestLargeVarBinaryVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestLargeVarCharVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestListVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestMapVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestNullCheckingForGet.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestOutOfMemoryForValueVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestOversizedAllocationForValueVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestSplitAndTransfer.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestStructVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestTypeLayout.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestUnionVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestUtils.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestValueVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestVarCharListVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestVectorAlloc.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestVectorReAlloc.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestVectorReset.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestVectorSchemaRoot.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/TestVectorUnloadLoad.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/compare/TestRangeEqualsVisitor.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/compare/TestTypeEqualsVisitor.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/complex/impl/TestComplexCopier.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/complex/impl/TestPromotableWriter.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestComplexWriter.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/BaseFileTest.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/ITTestIPCWithLargeArrowBuffers.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/MessageSerializerTest.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestArrowFile.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestArrowFooter.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestArrowReaderWriter.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestArrowStream.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestArrowStreamPipe.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestJSONFile.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestRoundTrip.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/TestUIntDictionaryRoundTrip.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/ipc/message/TestMessageMetadataResult.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/pojo/TestConvert.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/testing/RandomDataGenerator.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/testing/TestValueVectorPopulator.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/testing/ValueVectorDataPopulator.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/types/pojo/TestExtensionType.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/types/pojo/TestField.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/types/pojo/TestSchema.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/DecimalUtilityTest.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestDataSizeRoundingUtil.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestElementAddressableVectorIterator.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestMultiMapWithOrdinal.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestValidator.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestVectorAppender.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestVectorBatchAppender.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/util/TestVectorSchemaRootAppender.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/validate/TestValidateVector.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/validate/TestValidateVectorFull.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/validate/TestValidateVectorSchemaRoot.java
 delete mode 100644 java/vector/src/test/java/org/apache/arrow/vector/validate/TestValidateVectorTypeVisitor.java
 delete mode 100644 java/vector/src/test/resources/logback.xml
 delete mode 100644 js/.eslintignore
 delete mode 100644 js/.eslintrc.js
 delete mode 100644 js/.gitignore
 delete mode 100644 js/.npmrc
 delete mode 100644 js/.vscode/launch.json
 delete mode 100644 js/DEVELOP.md
 delete mode 100644 js/README.md
 delete mode 100755 js/bin/arrow2csv.js
 delete mode 100755 js/bin/file-to-stream.js
 delete mode 100755 js/bin/integration.js
 delete mode 100755 js/bin/json-to-arrow.js
 delete mode 100755 js/bin/print-buffer-alignment.js
 delete mode 100755 js/bin/stream-to-file.js
 delete mode 100644 js/examples/read_file.html
 delete mode 100644 js/gulp/argv.js
 delete mode 100644 js/gulp/arrow-task.js
 delete mode 100644 js/gulp/clean-task.js
 delete mode 100644 js/gulp/closure-task.js
 delete mode 100644 js/gulp/compile-task.js
 delete mode 100644 js/gulp/memoize-task.js
 delete mode 100644 js/gulp/minify-task.js
 delete mode 100644 js/gulp/package-task.js
 delete mode 100644 js/gulp/test-task.js
 delete mode 100644 js/gulp/typescript-task.js
 delete mode 100644 js/gulp/util.js
 delete mode 100644 js/gulpfile.js
 delete mode 100644 js/index.js
 delete mode 100644 js/index.mjs
 delete mode 100644 js/index.ts
 delete mode 100644 js/jest.config.js
 delete mode 100644 js/jest.coverage.config.js
 delete mode 100644 js/lerna.json
 delete mode 100755 js/npm-release.sh
 delete mode 100644 js/package.json
 delete mode 100644 js/perf/config.js
 delete mode 100644 js/perf/index.js
 delete mode 100644 js/perf/table_config.js
 delete mode 100644 js/src/Arrow.dom.ts
 delete mode 100644 js/src/Arrow.node.ts
 delete mode 100644 js/src/Arrow.ts
 delete mode 100644 js/src/bin/arrow2csv.ts
 delete mode 100644 js/src/builder.ts
 delete mode 100644 js/src/builder/binary.ts
 delete mode 100644 js/src/builder/bool.ts
 delete mode 100644 js/src/builder/buffer.ts
 delete mode 100644 js/src/builder/date.ts
 delete mode 100644 js/src/builder/decimal.ts
 delete mode 100644 js/src/builder/dictionary.ts
 delete mode 100644 js/src/builder/fixedsizebinary.ts
 delete mode 100644 js/src/builder/fixedsizelist.ts
 delete mode 100644 js/src/builder/float.ts
 delete mode 100644 js/src/builder/index.ts
 delete mode 100644 js/src/builder/int.ts
 delete mode 100644 js/src/builder/interval.ts
 delete mode 100644 js/src/builder/list.ts
 delete mode 100644 js/src/builder/map.ts
 delete mode 100644 js/src/builder/null.ts
 delete mode 100644 js/src/builder/run.ts
 delete mode 100644 js/src/builder/struct.ts
 delete mode 100644 js/src/builder/time.ts
 delete mode 100644 js/src/builder/timestamp.ts
 delete mode 100644 js/src/builder/union.ts
 delete mode 100644 js/src/builder/utf8.ts
 delete mode 100644 js/src/builder/valid.ts
 delete mode 100644 js/src/column.ts
 delete mode 100644 js/src/compute/dataframe.ts
 delete mode 100644 js/src/compute/predicate.ts
 delete mode 100644 js/src/data.ts
 delete mode 100644 js/src/enum.ts
 delete mode 100644 js/src/fb/.eslintrc.js
 delete mode 100644 js/src/fb/File.ts
 delete mode 100644 js/src/fb/Message.ts
 delete mode 100644 js/src/fb/Schema.ts
 delete mode 100644 js/src/interfaces.ts
 delete mode 100644 js/src/io/adapters.ts
 delete mode 100644 js/src/io/file.ts
 delete mode 100644 js/src/io/interfaces.ts
 delete mode 100644 js/src/io/node/builder.ts
 delete mode 100644 js/src/io/node/iterable.ts
 delete mode 100644 js/src/io/node/reader.ts
 delete mode 100644 js/src/io/node/writer.ts
 delete mode 100644 js/src/io/stream.ts
 delete mode 100644 js/src/io/whatwg/builder.ts
 delete mode 100644 js/src/io/whatwg/iterable.ts
 delete mode 100644 js/src/io/whatwg/reader.ts
 delete mode 100644 js/src/io/whatwg/writer.ts
 delete mode 100644 js/src/ipc/message.ts
 delete mode 100644 js/src/ipc/metadata/file.ts
 delete mode 100644 js/src/ipc/metadata/json.ts
 delete mode 100644 js/src/ipc/metadata/message.ts
 delete mode 100644 js/src/ipc/reader.ts
 delete mode 100644 js/src/ipc/writer.ts
 delete mode 100644 js/src/recordbatch.ts
 delete mode 100644 js/src/schema.ts
 delete mode 100644 js/src/table.ts
 delete mode 100644 js/src/type.ts
 delete mode 100644 js/src/util/args.ts
 delete mode 100644 js/src/util/bit.ts
 delete mode 100644 js/src/util/bn.ts
 delete mode 100644 js/src/util/buffer.ts
 delete mode 100644 js/src/util/compat.ts
 delete mode 100644 js/src/util/fn.ts
 delete mode 100644 js/src/util/int.ts
 delete mode 100644 js/src/util/math.ts
 delete mode 100644 js/src/util/pretty.ts
 delete mode 100644 js/src/util/recordbatch.ts
 delete mode 100644 js/src/util/utf8.ts
 delete mode 100644 js/src/util/vector.ts
 delete mode 100644 js/src/vector.ts
 delete mode 100644 js/src/vector/base.ts
 delete mode 100644 js/src/vector/binary.ts
 delete mode 100644 js/src/vector/bool.ts
 delete mode 100644 js/src/vector/chunked.ts
 delete mode 100644 js/src/vector/date.ts
 delete mode 100644 js/src/vector/decimal.ts
 delete mode 100644 js/src/vector/dictionary.ts
 delete mode 100644 js/src/vector/fixedsizebinary.ts
 delete mode 100644 js/src/vector/fixedsizelist.ts
 delete mode 100644 js/src/vector/float.ts
 delete mode 100644 js/src/vector/index.ts
 delete mode 100644 js/src/vector/int.ts
 delete mode 100644 js/src/vector/interval.ts
 delete mode 100644 js/src/vector/list.ts
 delete mode 100644 js/src/vector/map.ts
 delete mode 100644 js/src/vector/null.ts
 delete mode 100644 js/src/vector/row.ts
 delete mode 100644 js/src/vector/struct.ts
 delete mode 100644 js/src/vector/time.ts
 delete mode 100644 js/src/vector/timestamp.ts
 delete mode 100644 js/src/vector/union.ts
 delete mode 100644 js/src/vector/utf8.ts
 delete mode 100644 js/src/visitor.ts
 delete mode 100644 js/src/visitor/builderctor.ts
 delete mode 100644 js/src/visitor/bytewidth.ts
 delete mode 100644 js/src/visitor/get.ts
 delete mode 100644 js/src/visitor/indexof.ts
 delete mode 100644 js/src/visitor/iterator.ts
 delete mode 100644 js/src/visitor/jsontypeassembler.ts
 delete mode 100644 js/src/visitor/jsonvectorassembler.ts
 delete mode 100644 js/src/visitor/set.ts
 delete mode 100644 js/src/visitor/toarray.ts
 delete mode 100644 js/src/visitor/typeassembler.ts
 delete mode 100644 js/src/visitor/typecomparator.ts
 delete mode 100644 js/src/visitor/typector.ts
 delete mode 100644 js/src/visitor/vectorassembler.ts
 delete mode 100644 js/src/visitor/vectorctor.ts
 delete mode 100644 js/src/visitor/vectorloader.ts
 delete mode 100644 js/test/.eslintrc.js
 delete mode 100644 js/test/Arrow.ts
 delete mode 100644 js/test/data/tables.ts
 delete mode 100644 js/test/data/tables/generate.py
 delete mode 100644 js/test/generate-test-data.ts
 delete mode 100644 js/test/inference/column.ts
 delete mode 100644 js/test/inference/nested.ts
 delete mode 100644 js/test/inference/visitor/get.ts
 delete mode 100644 js/test/jest-extensions.ts
 delete mode 100644 js/test/tsconfig.coverage.json
 delete mode 100644 js/test/tsconfig.json
 delete mode 100644 js/test/unit/bit-tests.ts
 delete mode 100644 js/test/unit/builders/builder-tests.ts
 delete mode 100644 js/test/unit/builders/date-tests.ts
 delete mode 100644 js/test/unit/builders/dictionary-tests.ts
 delete mode 100644 js/test/unit/builders/int64-tests.ts
 delete mode 100644 js/test/unit/builders/primitive-tests.ts
 delete mode 100644 js/test/unit/builders/uint64-tests.ts
 delete mode 100644 js/test/unit/builders/utf8-tests.ts
 delete mode 100644 js/test/unit/builders/utils.ts
 delete mode 100644 js/test/unit/generated-data-tests.ts
 delete mode 100644 js/test/unit/generated-data-validators.ts
 delete mode 100644 js/test/unit/int-tests.ts
 delete mode 100644 js/test/unit/ipc/helpers.ts
 delete mode 100644 js/test/unit/ipc/message-reader-tests.ts
 delete mode 100644 js/test/unit/ipc/reader/file-reader-tests.ts
 delete mode 100644 js/test/unit/ipc/reader/from-inference-tests.ts
 delete mode 100644 js/test/unit/ipc/reader/json-reader-tests.ts
 delete mode 100644 js/test/unit/ipc/reader/stream-reader-tests.ts
 delete mode 100644 js/test/unit/ipc/reader/streams-dom-tests.ts
 delete mode 100644 js/test/unit/ipc/reader/streams-node-tests.ts
 delete mode 100644 js/test/unit/ipc/validate.ts
 delete mode 100644 js/test/unit/ipc/writer/file-writer-tests.ts
 delete mode 100644 js/test/unit/ipc/writer/json-writer-tests.ts
 delete mode 100644 js/test/unit/ipc/writer/stream-writer-tests.ts
 delete mode 100644 js/test/unit/ipc/writer/streams-dom-tests.ts
 delete mode 100644 js/test/unit/ipc/writer/streams-node-tests.ts
 delete mode 100644 js/test/unit/math-tests.ts
 delete mode 100644 js/test/unit/recordbatch/record-batch-tests.ts
 delete mode 100644 js/test/unit/table-tests.ts
 delete mode 100644 js/test/unit/table/assign-tests.ts
 delete mode 100644 js/test/unit/table/serialize-tests.ts
 delete mode 100644 js/test/unit/utils.ts
 delete mode 100644 js/test/unit/vector/bool-vector-tests.ts
 delete mode 100644 js/test/unit/vector/date-vector-tests.ts
 delete mode 100644 js/test/unit/vector/numeric-vector-tests.ts
 delete mode 100644 js/test/unit/vector/vector-tests.ts
 delete mode 100644 js/test/unit/visitor-tests.ts
 delete mode 100644 js/tsconfig.json
 delete mode 100644 js/tsconfig/tsconfig.base.json
 delete mode 100644 js/tsconfig/tsconfig.bin.cjs.json
 delete mode 100644 js/tsconfig/tsconfig.es2015.cjs.json
 delete mode 100644 js/tsconfig/tsconfig.es2015.cls.json
 delete mode 100644 js/tsconfig/tsconfig.es2015.esm.json
 delete mode 100644 js/tsconfig/tsconfig.es5.cjs.json
 delete mode 100644 js/tsconfig/tsconfig.es5.cls.json
 delete mode 100644 js/tsconfig/tsconfig.es5.esm.json
 delete mode 100644 js/tsconfig/tsconfig.esnext.cjs.json
 delete mode 100644 js/tsconfig/tsconfig.esnext.cls.json
 delete mode 100644 js/tsconfig/tsconfig.esnext.esm.json
 delete mode 100644 js/typedoc.js
 delete mode 100644 js/yarn.lock
 delete mode 100644 julia/Arrow/.gitignore
 delete mode 100644 julia/Arrow/LICENSE.md
 delete mode 100644 julia/Arrow/Project.toml
 delete mode 100644 julia/Arrow/README.md
 delete mode 100644 julia/Arrow/docs/.gitignore
 delete mode 100644 julia/Arrow/docs/Manifest.toml
 delete mode 100644 julia/Arrow/docs/Project.toml
 delete mode 100644 julia/Arrow/docs/make.jl
 delete mode 100644 julia/Arrow/docs/src/index.md
 delete mode 100644 julia/Arrow/docs/src/manual.md
 delete mode 100644 julia/Arrow/docs/src/reference.md
 delete mode 100644 julia/Arrow/src/Arrow.jl
 delete mode 100644 julia/Arrow/src/FlatBuffers/FlatBuffers.jl
 delete mode 100644 julia/Arrow/src/FlatBuffers/builder.jl
 delete mode 100644 julia/Arrow/src/FlatBuffers/table.jl
 delete mode 100644 julia/Arrow/src/arraytypes/arraytypes.jl
 delete mode 100644 julia/Arrow/src/arraytypes/bool.jl
 delete mode 100644 julia/Arrow/src/arraytypes/compressed.jl
 delete mode 100644 julia/Arrow/src/arraytypes/dictencoding.jl
 delete mode 100644 julia/Arrow/src/arraytypes/fixedsizelist.jl
 delete mode 100644 julia/Arrow/src/arraytypes/list.jl
 delete mode 100644 julia/Arrow/src/arraytypes/map.jl
 delete mode 100644 julia/Arrow/src/arraytypes/primitive.jl
 delete mode 100644 julia/Arrow/src/arraytypes/struct.jl
 delete mode 100644 julia/Arrow/src/arraytypes/unions.jl
 delete mode 100644 julia/Arrow/src/arrowtypes.jl
 delete mode 100644 julia/Arrow/src/eltypes.jl
 delete mode 100644 julia/Arrow/src/metadata/File.jl
 delete mode 100644 julia/Arrow/src/metadata/Flatbuf.jl
 delete mode 100644 julia/Arrow/src/metadata/Message.jl
 delete mode 100644 julia/Arrow/src/metadata/Schema.jl
 delete mode 100644 julia/Arrow/src/table.jl
 delete mode 100644 julia/Arrow/src/utils.jl
 delete mode 100644 julia/Arrow/src/write.jl
 delete mode 100644 julia/Arrow/test/arrowjson.jl
 delete mode 100644 julia/Arrow/test/arrowjson/datetime.json
 delete mode 100644 julia/Arrow/test/arrowjson/decimal.json
 delete mode 100644 julia/Arrow/test/arrowjson/dictionary.json
 delete mode 100644 julia/Arrow/test/arrowjson/dictionary_unsigned.json
 delete mode 100644 julia/Arrow/test/arrowjson/map.json
 delete mode 100644 julia/Arrow/test/arrowjson/nested.json
 delete mode 100644 julia/Arrow/test/arrowjson/primitive-empty.json
 delete mode 100644 julia/Arrow/test/arrowjson/primitive.json
 delete mode 100644 julia/Arrow/test/arrowjson/primitive_no_batches.json
 delete mode 100644 julia/Arrow/test/dates.jl
 delete mode 100644 julia/Arrow/test/integrationtest.jl
 delete mode 100644 julia/Arrow/test/pyarrow_roundtrip.jl
 delete mode 100644 julia/Arrow/test/runtests.jl
 delete mode 100644 julia/Arrow/test/testtables.jl
 delete mode 100644 matlab/.gitignore
 delete mode 100644 matlab/CMakeLists.txt
 delete mode 100644 matlab/README.md
 delete mode 100644 matlab/build_support/common_vars.m
 delete mode 100644 matlab/build_support/compile.m
 delete mode 100644 matlab/build_support/test.m
 delete mode 100644 matlab/doc/matlab_interface_for_apache_arrow_design.md
 delete mode 100644 matlab/src/+mlarrow/+util/createMetadataStruct.m
 delete mode 100644 matlab/src/+mlarrow/+util/createVariableStruct.m
 delete mode 100644 matlab/src/+mlarrow/+util/makeValidMATLABTableVariableNames.m
 delete mode 100644 matlab/src/+mlarrow/+util/table2mlarrow.m
 delete mode 100644 matlab/src/feather_reader.cc
 delete mode 100644 matlab/src/feather_reader.h
 delete mode 100644 matlab/src/feather_writer.cc
 delete mode 100644 matlab/src/feather_writer.h
 delete mode 100644 matlab/src/featherread.m
 delete mode 100644 matlab/src/featherreadmex.cc
 delete mode 100644 matlab/src/featherwrite.m
 delete mode 100644 matlab/src/featherwritemex.cc
 delete mode 100644 matlab/src/matlab_traits.h
 delete mode 100644 matlab/src/util/handle_status.cc
 delete mode 100644 matlab/src/util/handle_status.h
 delete mode 100644 matlab/src/util/unicode_conversion.cc
 delete mode 100644 matlab/src/util/unicode_conversion.h
 delete mode 100755 matlab/test/tfeather.m
 delete mode 100644 matlab/test/tfeathermex.m
 delete mode 100644 matlab/test/util/createTable.m
 delete mode 100644 matlab/test/util/createVariablesAndMetadataStructs.m
 delete mode 100644 matlab/test/util/featherMEXRoundTrip.m
 delete mode 100644 matlab/test/util/featherRoundTrip.m
 delete mode 100644 python/.coveragerc
 delete mode 100644 python/.flake8.cython
 delete mode 100644 python/.gitignore
 delete mode 100644 python/CMakeLists.txt
 delete mode 100644 python/MANIFEST.in
 delete mode 100644 python/README.md
 delete mode 100755 python/asv-build.sh
 delete mode 100755 python/asv-install.sh
 delete mode 100755 python/asv-uninstall.sh
 delete mode 100644 python/asv.conf.json
 delete mode 100644 python/benchmarks/__init__.py
 delete mode 100644 python/benchmarks/array_ops.py
 delete mode 100644 python/benchmarks/common.py
 delete mode 100644 python/benchmarks/convert_builtins.py
 delete mode 100644 python/benchmarks/convert_pandas.py
 delete mode 100644 python/benchmarks/io.py
 delete mode 100644 python/benchmarks/microbenchmarks.py
 delete mode 100644 python/benchmarks/parquet.py
 delete mode 100644 python/benchmarks/plasma.py
 delete mode 100644 python/benchmarks/streaming.py
 delete mode 120000 python/cmake_modules
 delete mode 100644 python/examples/flight/client.py
 delete mode 100644 python/examples/flight/middleware.py
 delete mode 100644 python/examples/flight/server.py
 delete mode 100644 python/examples/minimal_build/Dockerfile.fedora
 delete mode 100644 python/examples/minimal_build/Dockerfile.ubuntu
 delete mode 100644 python/examples/minimal_build/README.md
 delete mode 100755 python/examples/minimal_build/build_conda.sh
 delete mode 100755 python/examples/minimal_build/build_venv.sh
 delete mode 100644 python/examples/plasma/sorting/multimerge.pyx
 delete mode 100644 python/examples/plasma/sorting/setup.py
 delete mode 100644 python/examples/plasma/sorting/sort_df.py
 delete mode 100644 python/pyarrow/__init__.pxd
 delete mode 100644 python/pyarrow/__init__.py
 delete mode 100644 python/pyarrow/_compute.pxd
 delete mode 100644 python/pyarrow/_compute.pyx
 delete mode 100644 python/pyarrow/_csv.pxd
 delete mode 100644 python/pyarrow/_csv.pyx
 delete mode 100644 python/pyarrow/_cuda.pxd
 delete mode 100644 python/pyarrow/_cuda.pyx
 delete mode 100644 python/pyarrow/_dataset.pyx
 delete mode 100644 python/pyarrow/_flight.pyx
 delete mode 100644 python/pyarrow/_fs.pxd
 delete mode 100644 python/pyarrow/_fs.pyx
 delete mode 100644 python/pyarrow/_hdfs.pyx
 delete mode 100644 python/pyarrow/_json.pyx
 delete mode 100644 python/pyarrow/_orc.pxd
 delete mode 100644 python/pyarrow/_orc.pyx
 delete mode 100644 python/pyarrow/_parquet.pxd
 delete mode 100644 python/pyarrow/_parquet.pyx
 delete mode 100644 python/pyarrow/_plasma.pyx
 delete mode 100644 python/pyarrow/_s3fs.pyx
 delete mode 100644 python/pyarrow/array.pxi
 delete mode 100644 python/pyarrow/benchmark.pxi
 delete mode 100644 python/pyarrow/benchmark.py
 delete mode 100644 python/pyarrow/builder.pxi
 delete mode 100644 python/pyarrow/cffi.py
 delete mode 100644 python/pyarrow/compat.pxi
 delete mode 100644 python/pyarrow/compat.py
 delete mode 100644 python/pyarrow/compute.py
 delete mode 100644 python/pyarrow/config.pxi
 delete mode 100644 python/pyarrow/csv.py
 delete mode 100644 python/pyarrow/cuda.py
 delete mode 100644 python/pyarrow/dataset.py
 delete mode 100644 python/pyarrow/error.pxi
 delete mode 100644 python/pyarrow/feather.pxi
 delete mode 100644 python/pyarrow/feather.py
 delete mode 100644 python/pyarrow/filesystem.py
 delete mode 100644 python/pyarrow/flight.py
 delete mode 100644 python/pyarrow/fs.py
 delete mode 100644 python/pyarrow/gandiva.pyx
 delete mode 100644 python/pyarrow/hdfs.py
 delete mode 100644 python/pyarrow/includes/__init__.pxd
 delete mode 100644 python/pyarrow/includes/common.pxd
 delete mode 100644 python/pyarrow/includes/libarrow.pxd
 delete mode 100644 python/pyarrow/includes/libarrow_cuda.pxd
 delete mode 100644 python/pyarrow/includes/libarrow_dataset.pxd
 delete mode 100644 python/pyarrow/includes/libarrow_flight.pxd
 delete mode 100644 python/pyarrow/includes/libarrow_fs.pxd
 delete mode 100644 python/pyarrow/includes/libgandiva.pxd
 delete mode 100644 python/pyarrow/includes/libplasma.pxd
 delete mode 100644 python/pyarrow/io-hdfs.pxi
 delete mode 100644 python/pyarrow/io.pxi
 delete mode 100644 python/pyarrow/ipc.pxi
 delete mode 100644 python/pyarrow/ipc.py
 delete mode 100644 python/pyarrow/json.py
 delete mode 100644 python/pyarrow/jvm.py
 delete mode 100644 python/pyarrow/lib.pxd
 delete mode 100644 python/pyarrow/lib.pyx
 delete mode 100644 python/pyarrow/memory.pxi
 delete mode 100644 python/pyarrow/orc.py
 delete mode 100644 python/pyarrow/pandas-shim.pxi
 delete mode 100644 python/pyarrow/pandas_compat.py
 delete mode 100644 python/pyarrow/parquet.py
 delete mode 100644 python/pyarrow/plasma.py
 delete mode 100644 python/pyarrow/public-api.pxi
 delete mode 100644 python/pyarrow/scalar.pxi
 delete mode 100644 python/pyarrow/serialization.pxi
 delete mode 100644 python/pyarrow/serialization.py
 delete mode 100644 python/pyarrow/table.pxi
 delete mode 100644 python/pyarrow/tensor.pxi
 delete mode 100644 python/pyarrow/tensorflow/plasma_op.cc
 delete mode 100644 python/pyarrow/tests/__init__.py
 delete mode 100644 python/pyarrow/tests/arrow_7980.py
 delete mode 100644 python/pyarrow/tests/conftest.py
 delete mode 100644 python/pyarrow/tests/data/feather/v0.17.0.version=2-compression=lz4.feather
 delete mode 100644 python/pyarrow/tests/data/orc/README.md
 delete mode 100644 python/pyarrow/tests/data/orc/TestOrcFile.emptyFile.jsn.gz
 delete mode 100644 python/pyarrow/tests/data/orc/TestOrcFile.emptyFile.orc
 delete mode 100644 python/pyarrow/tests/data/orc/TestOrcFile.test1.jsn.gz
 delete mode 100644 python/pyarrow/tests/data/orc/TestOrcFile.test1.orc
 delete mode 100644 python/pyarrow/tests/data/orc/TestOrcFile.testDate1900.jsn.gz
 delete mode 100644 python/pyarrow/tests/data/orc/TestOrcFile.testDate1900.orc
 delete mode 100644 python/pyarrow/tests/data/orc/decimal.jsn.gz
 delete mode 100644 python/pyarrow/tests/data/orc/decimal.orc
 delete mode 100644 python/pyarrow/tests/data/parquet/v0.7.1.all-named-index.parquet
 delete mode 100644 python/pyarrow/tests/data/parquet/v0.7.1.column-metadata-handling.parquet
 delete mode 100644 python/pyarrow/tests/data/parquet/v0.7.1.parquet
 delete mode 100644 python/pyarrow/tests/data/parquet/v0.7.1.some-named-index.parquet
 delete mode 100644 python/pyarrow/tests/deserialize_buffer.py
 delete mode 100644 python/pyarrow/tests/pandas_examples.py
 delete mode 100644 python/pyarrow/tests/pandas_threaded_import.py
 delete mode 100644 python/pyarrow/tests/parquet/common.py
 delete mode 100644 python/pyarrow/tests/parquet/conftest.py
 delete mode 100644 python/pyarrow/tests/parquet/test_basic.py
 delete mode 100644 python/pyarrow/tests/parquet/test_compliant_nested_type.py
 delete mode 100644 python/pyarrow/tests/parquet/test_data_types.py
 delete mode 100644 python/pyarrow/tests/parquet/test_dataset.py
 delete mode 100644 python/pyarrow/tests/parquet/test_datetime.py
 delete mode 100644 python/pyarrow/tests/parquet/test_metadata.py
 delete mode 100644 python/pyarrow/tests/parquet/test_pandas.py
 delete mode 100644 python/pyarrow/tests/parquet/test_parquet_file.py
 delete mode 100644 python/pyarrow/tests/parquet/test_parquet_writer.py
 delete mode 100644 python/pyarrow/tests/pyarrow_cython_example.pyx
 delete mode 100644 python/pyarrow/tests/strategies.py
 delete mode 100644 python/pyarrow/tests/test_adhoc_memory_leak.py
 delete mode 100644 python/pyarrow/tests/test_array.py
 delete mode 100644 python/pyarrow/tests/test_builder.py
 delete mode 100644 python/pyarrow/tests/test_cffi.py
 delete mode 100644 python/pyarrow/tests/test_compute.py
 delete mode 100644 python/pyarrow/tests/test_convert_builtin.py
 delete mode 100644 python/pyarrow/tests/test_csv.py
 delete mode 100644 python/pyarrow/tests/test_cuda.py
 delete mode 100644 python/pyarrow/tests/test_cuda_numba_interop.py
 delete mode 100644 python/pyarrow/tests/test_cython.py
 delete mode 100644 python/pyarrow/tests/test_dataset.py
 delete mode 100644 python/pyarrow/tests/test_deprecations.py
 delete mode 100644 python/pyarrow/tests/test_extension_type.py
 delete mode 100644 python/pyarrow/tests/test_feather.py
 delete mode 100644 python/pyarrow/tests/test_filesystem.py
 delete mode 100644 python/pyarrow/tests/test_flight.py
 delete mode 100644 python/pyarrow/tests/test_fs.py
 delete mode 100644 python/pyarrow/tests/test_gandiva.py
 delete mode 100644 python/pyarrow/tests/test_hdfs.py
 delete mode 100644 python/pyarrow/tests/test_io.py
 delete mode 100644 python/pyarrow/tests/test_ipc.py
 delete mode 100644 python/pyarrow/tests/test_json.py
 delete mode 100644 python/pyarrow/tests/test_jvm.py
 delete mode 100644 python/pyarrow/tests/test_memory.py
 delete mode 100644 python/pyarrow/tests/test_misc.py
 delete mode 100644 python/pyarrow/tests/test_orc.py
 delete mode 100644 python/pyarrow/tests/test_pandas.py
 delete mode 100644 python/pyarrow/tests/test_plasma.py
 delete mode 100644 python/pyarrow/tests/test_plasma_tf_op.py
 delete mode 100644 python/pyarrow/tests/test_scalars.py
 delete mode 100644 python/pyarrow/tests/test_schema.py
 delete mode 100644 python/pyarrow/tests/test_serialization.py
 delete mode 100644 python/pyarrow/tests/test_serialization_deprecated.py
 delete mode 100644 python/pyarrow/tests/test_sparse_tensor.py
 delete mode 100644 python/pyarrow/tests/test_strategies.py
 delete mode 100644 python/pyarrow/tests/test_table.py
 delete mode 100644 python/pyarrow/tests/test_tensor.py
 delete mode 100644 python/pyarrow/tests/test_types.py
 delete mode 100644 python/pyarrow/tests/util.py
 delete mode 100644 python/pyarrow/types.pxi
 delete mode 100644 python/pyarrow/types.py
 delete mode 100644 python/pyarrow/util.py
 delete mode 100644 python/pyarrow/vendored/__init__.py
 delete mode 100644 python/pyarrow/vendored/version.py
 delete mode 100644 python/pyproject.toml
 delete mode 100644 python/requirements-build.txt
 delete mode 100644 python/requirements-test.txt
 delete mode 100644 python/requirements-wheel-build.txt
 delete mode 100644 python/requirements-wheel-test.txt
 delete mode 100644 python/scripts/test_imports.py
 delete mode 100644 python/scripts/test_leak.py
 delete mode 100644 python/setup.cfg
 delete mode 100755 python/setup.py
 delete mode 100644 r/.Rbuildignore
 delete mode 100644 r/.gitignore
 delete mode 100644 r/DESCRIPTION
 delete mode 100644 r/Makefile
 delete mode 100644 r/NAMESPACE
 delete mode 100644 r/NEWS.md
 delete mode 100644 r/R/array-data.R
 delete mode 100644 r/R/array.R
 delete mode 100644 r/R/arrow-datum.R
 delete mode 100644 r/R/arrow-package.R
 delete mode 100644 r/R/arrow-tabular.R
 delete mode 100644 r/R/arrowExports.R
 delete mode 100644 r/R/buffer.R
 delete mode 100644 r/R/chunked-array.R
 delete mode 100644 r/R/compression.R
 delete mode 100644 r/R/compute.R
 delete mode 100644 r/R/config.R
 delete mode 100644 r/R/csv.R
 delete mode 100644 r/R/dataset-factory.R
 delete mode 100644 r/R/dataset-format.R
 delete mode 100644 r/R/dataset-partition.R
 delete mode 100644 r/R/dataset-scan.R
 delete mode 100644 r/R/dataset-write.R
 delete mode 100644 r/R/dataset.R
 delete mode 100644 r/R/deprecated.R
 delete mode 100644 r/R/dictionary.R
 delete mode 100644 r/R/dplyr.R
 delete mode 100644 r/R/enums.R
 delete mode 100644 r/R/expression.R
 delete mode 100644 r/R/feather.R
 delete mode 100644 r/R/field.R
 delete mode 100644 r/R/filesystem.R
 delete mode 100644 r/R/flight.R
 delete mode 100644 r/R/install-arrow.R
 delete mode 100644 r/R/io.R
 delete mode 100644 r/R/ipc_stream.R
 delete mode 100644 r/R/json.R
 delete mode 100644 r/R/memory-pool.R
 delete mode 100644 r/R/message.R
 delete mode 100644 r/R/metadata.R
 delete mode 100644 r/R/parquet.R
 delete mode 100644 r/R/python.R
 delete mode 100644 r/R/record-batch-reader.R
 delete mode 100644 r/R/record-batch-writer.R
 delete mode 100644 r/R/record-batch.R
 delete mode 100644 r/R/reexports-bit64.R
 delete mode 100644 r/R/reexports-tidyselect.R
 delete mode 100644 r/R/scalar.R
 delete mode 100644 r/R/schema.R
 delete mode 100644 r/R/table.R
 delete mode 100644 r/R/type.R
 delete mode 100644 r/R/util.R
 delete mode 100644 r/README.md
 delete mode 100644 r/_pkgdown.yml
 delete mode 100644 r/arrow.Rproj
 delete mode 100755 r/cleanup
 delete mode 100755 r/configure
 delete mode 100644 r/configure.win
 delete mode 100644 r/cran-comments.md
 delete mode 100644 r/data-raw/codegen.R
 delete mode 100644 r/extra-tests/helpers.R
 delete mode 100644 r/extra-tests/test-read-files.R
 delete mode 100644 r/extra-tests/write-files.R
 delete mode 100644 r/inst/NOTICE.txt
 delete mode 100755 r/inst/build_arrow_static.sh
 delete mode 100644 r/inst/demo_flight_server.py
 delete mode 100644 r/inst/v0.7.1.parquet
 delete mode 100755 r/lint.sh
 delete mode 100644 r/man/ArrayData.Rd
 delete mode 100644 r/man/ChunkedArray.Rd
 delete mode 100644 r/man/Codec.Rd
 delete mode 100644 r/man/CsvReadOptions.Rd
 delete mode 100644 r/man/CsvTableReader.Rd
 delete mode 100644 r/man/DataType.Rd
 delete mode 100644 r/man/Dataset.Rd
 delete mode 100644 r/man/DictionaryType.Rd
 delete mode 100644 r/man/Expression.Rd
 delete mode 100644 r/man/FeatherReader.Rd
 delete mode 100644 r/man/Field.Rd
 delete mode 100644 r/man/FileFormat.Rd
 delete mode 100644 r/man/FileInfo.Rd
 delete mode 100644 r/man/FileSelector.Rd
 delete mode 100644 r/man/FileSystem.Rd
 delete mode 100644 r/man/FileWriteOptions.Rd
 delete mode 100644 r/man/FixedWidthType.Rd
 delete mode 100644 r/man/FragmentScanOptions.Rd
 delete mode 100644 r/man/InputStream.Rd
 delete mode 100644 r/man/MemoryPool.Rd
 delete mode 100644 r/man/Message.Rd
 delete mode 100644 r/man/MessageReader.Rd
 delete mode 100644 r/man/OutputStream.Rd
 delete mode 100644 r/man/ParquetArrowReaderProperties.Rd
 delete mode 100644 r/man/ParquetFileReader.Rd
 delete mode 100644 r/man/ParquetFileWriter.Rd
 delete mode 100644 r/man/ParquetWriterProperties.Rd
 delete mode 100644 r/man/Partitioning.Rd
 delete mode 100644 r/man/RecordBatch.Rd
 delete mode 100644 r/man/RecordBatchReader.Rd
 delete mode 100644 r/man/RecordBatchWriter.Rd
 delete mode 100644 r/man/Scalar.Rd
 delete mode 100644 r/man/Scanner.Rd
 delete mode 100644 r/man/Schema.Rd
 delete mode 100644 r/man/Table.Rd
 delete mode 100644 r/man/array.Rd
 delete mode 100644 r/man/arrow-package.Rd
 delete mode 100644 r/man/arrow_available.Rd
 delete mode 100644 r/man/arrow_info.Rd
 delete mode 100644 r/man/buffer.Rd
 delete mode 100644 r/man/call_function.Rd
 delete mode 100644 r/man/cast_options.Rd
 delete mode 100644 r/man/codec_is_available.Rd
 delete mode 100644 r/man/compression.Rd
 delete mode 100644 r/man/copy_files.Rd
 delete mode 100644 r/man/cpu_count.Rd
 delete mode 100644 r/man/data-type.Rd
 delete mode 100644 r/man/dataset_factory.Rd
 delete mode 100644 r/man/default_memory_pool.Rd
 delete mode 100644 r/man/dictionary.Rd
 delete mode 100644 r/man/enums.Rd
 delete mode 100644 r/man/flight_connect.Rd
 delete mode 100644 r/man/flight_get.Rd
 delete mode 100644 r/man/flight_put.Rd
 delete mode 100644 r/man/hive_partition.Rd
 delete mode 100644 r/man/install_arrow.Rd
 delete mode 100644 r/man/install_pyarrow.Rd
 delete mode 100644 r/man/list_compute_functions.Rd
 delete mode 100644 r/man/list_flights.Rd
 delete mode 100644 r/man/load_flight_server.Rd
 delete mode 100644 r/man/make_readable_file.Rd
 delete mode 100644 r/man/map_batches.Rd
 delete mode 100644 r/man/match_arrow.Rd
 delete mode 100644 r/man/mmap_create.Rd
 delete mode 100644 r/man/mmap_open.Rd
 delete mode 100644 r/man/open_dataset.Rd
 delete mode 100644 r/man/read_delim_arrow.Rd
 delete mode 100644 r/man/read_feather.Rd
 delete mode 100644 r/man/read_ipc_stream.Rd
 delete mode 100644 r/man/read_json_arrow.Rd
 delete mode 100644 r/man/read_message.Rd
 delete mode 100644 r/man/read_parquet.Rd
 delete mode 100644 r/man/read_schema.Rd
 delete mode 100644 r/man/reexports.Rd
 delete mode 100644 r/man/s3_bucket.Rd
 delete mode 100644 r/man/type.Rd
 delete mode 100644 r/man/unify_schemas.Rd
 delete mode 100644 r/man/value_counts.Rd
 delete mode 100644 r/man/write_dataset.Rd
 delete mode 100644 r/man/write_feather.Rd
 delete mode 100644 r/man/write_ipc_stream.Rd
 delete mode 100644 r/man/write_parquet.Rd
 delete mode 100644 r/man/write_to_raw.Rd
 delete mode 100644 r/pkgdown/extra.js
 delete mode 100644 r/src/.clang-format
 delete mode 100644 r/src/.gitignore
 delete mode 100644 r/src/Makevars.in
 delete mode 100644 r/src/array.cpp
 delete mode 100644 r/src/array_to_vector.cpp
 delete mode 100644 r/src/arraydata.cpp
 delete mode 100644 r/src/arrowExports.cpp
 delete mode 100644 r/src/arrow_cpp11.h
 delete mode 100644 r/src/arrow_types.h
 delete mode 100644 r/src/arrow_vctrs.h
 delete mode 100644 r/src/buffer.cpp
 delete mode 100644 r/src/chunkedarray.cpp
 delete mode 100644 r/src/compression.cpp
 delete mode 100644 r/src/compute.cpp
 delete mode 100644 r/src/csv.cpp
 delete mode 100644 r/src/dataset.cpp
 delete mode 100644 r/src/datatype.cpp
 delete mode 100644 r/src/expression.cpp
 delete mode 100644 r/src/feather.cpp
 delete mode 100644 r/src/field.cpp
 delete mode 100644 r/src/filesystem.cpp
 delete mode 100644 r/src/imports.cpp
 delete mode 100644 r/src/io.cpp
 delete mode 100644 r/src/json.cpp
 delete mode 100644 r/src/memorypool.cpp
 delete mode 100644 r/src/message.cpp
 delete mode 100644 r/src/nameof.h
 delete mode 100644 r/src/parquet.cpp
 delete mode 100644 r/src/py-to-r.cpp
 delete mode 100644 r/src/r_to_arrow.cpp
 delete mode 100644 r/src/recordbatch.cpp
 delete mode 100644 r/src/recordbatchreader.cpp
 delete mode 100644 r/src/recordbatchwriter.cpp
 delete mode 100644 r/src/runtimeinfo.cpp
 delete mode 100644 r/src/scalar.cpp
 delete mode 100644 r/src/schema.cpp
 delete mode 100644 r/src/symbols.cpp
 delete mode 100644 r/src/table.cpp
 delete mode 100644 r/src/threadpool.cpp
 delete mode 100644 r/src/type_infer.cpp
 delete mode 100644 r/tests/testthat.R
 delete mode 100644 r/tests/testthat/golden-files/data-arrow-extra-meta_3.0.0.parquet
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_0.17.0_lz4.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_0.17.0_uncompressed.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_0.17.0_zstd.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_1.0.1.parquet
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_1.0.1_lz4.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_1.0.1_uncompressed.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_1.0.1_zstd.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_2.0.0.parquet
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_2.0.0_lz4.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_2.0.0_uncompressed.feather
 delete mode 100644 r/tests/testthat/golden-files/data-arrow_2.0.0_zstd.feather
 delete mode 100644 r/tests/testthat/helper-arrow.R
 delete mode 100644 r/tests/testthat/helper-data.R
 delete mode 100644 r/tests/testthat/helper-expectation.R
 delete mode 100644 r/tests/testthat/helper-parquet.R
 delete mode 100644 r/tests/testthat/helper-roundtrip.R
 delete mode 100644 r/tests/testthat/helper-skip.R
 delete mode 100644 r/tests/testthat/latin1.R
 delete mode 100644 r/tests/testthat/test-Array.R
 delete mode 100644 r/tests/testthat/test-RecordBatch.R
 delete mode 100644 r/tests/testthat/test-Table.R
 delete mode 100644 r/tests/testthat/test-array-data.R
 delete mode 100644 r/tests/testthat/test-arrow-info.R
 delete mode 100644 r/tests/testthat/test-arrow.R
 delete mode 100644 r/tests/testthat/test-backwards-compatibility.R
 delete mode 100644 r/tests/testthat/test-buffer-reader.R
 delete mode 100644 r/tests/testthat/test-buffer.R
 delete mode 100644 r/tests/testthat/test-chunked-array.R
 delete mode 100644 r/tests/testthat/test-chunked-array.txt
 delete mode 100644 r/tests/testthat/test-compressed.R
 delete mode 100644 r/tests/testthat/test-compute-aggregate.R
 delete mode 100644 r/tests/testthat/test-compute-arith.R
 delete mode 100644 r/tests/testthat/test-compute-sort.R
 delete mode 100644 r/tests/testthat/test-compute-vector.R
 delete mode 100644 r/tests/testthat/test-csv.R
 delete mode 100644 r/tests/testthat/test-data-type.R
 delete mode 100644 r/tests/testthat/test-dataset.R
 delete mode 100644 r/tests/testthat/test-dplyr-arrange.R
 delete mode 100644 r/tests/testthat/test-dplyr-filter.R
 delete mode 100644 r/tests/testthat/test-dplyr-group-by.R
 delete mode 100644 r/tests/testthat/test-dplyr-mutate.R
 delete mode 100644 r/tests/testthat/test-dplyr-string-functions.R
 delete mode 100644 r/tests/testthat/test-dplyr.R
 delete mode 100644 r/tests/testthat/test-expression.R
 delete mode 100644 r/tests/testthat/test-feather.R
 delete mode 100644 r/tests/testthat/test-field.R
 delete mode 100644 r/tests/testthat/test-filesystem.R
 delete mode 100644 r/tests/testthat/test-install-arrow.R
 delete mode 100644 r/tests/testthat/test-json.R
 delete mode 100644 r/tests/testthat/test-memory-pool.R
 delete mode 100644 r/tests/testthat/test-message-reader.R
 delete mode 100644 r/tests/testthat/test-message.R
 delete mode 100644 r/tests/testthat/test-metadata.R
 delete mode 100644 r/tests/testthat/test-parquet.R
 delete mode 100644 r/tests/testthat/test-python-flight.R
 delete mode 100644 r/tests/testthat/test-python.R
 delete mode 100644 r/tests/testthat/test-read-record-batch.R
 delete mode 100644 r/tests/testthat/test-read-write.R
 delete mode 100644 r/tests/testthat/test-record-batch-reader.R
 delete mode 100644 r/tests/testthat/test-s3-minio.R
 delete mode 100644 r/tests/testthat/test-s3.R
 delete mode 100644 r/tests/testthat/test-scalar.R
 delete mode 100644 r/tests/testthat/test-schema.R
 delete mode 100644 r/tests/testthat/test-thread-pool.R
 delete mode 100644 r/tests/testthat/test-type.R
 delete mode 100644 r/tests/testthat/test-utf.R
 delete mode 100644 r/tools/autobrew
 delete mode 100644 r/tools/nixlibs.R
 delete mode 100644 r/tools/ubsan.supp
 delete mode 100644 r/tools/winlibs.R
 delete mode 100644 r/vignettes/arrow.Rmd
 delete mode 100644 r/vignettes/dataset.Rmd
 delete mode 100644 r/vignettes/developing.Rmd
 delete mode 100644 r/vignettes/flight.Rmd
 delete mode 100644 r/vignettes/fs.Rmd
 delete mode 100644 r/vignettes/install.Rmd
 delete mode 100644 r/vignettes/python.Rmd
 delete mode 100644 ruby/Gemfile
 delete mode 100644 ruby/README.md
 delete mode 100644 ruby/Rakefile
 delete mode 100644 ruby/red-arrow-cuda/.gitignore
 delete mode 100644 ruby/red-arrow-cuda/Gemfile
 delete mode 100644 ruby/red-arrow-cuda/LICENSE.txt
 delete mode 100644 ruby/red-arrow-cuda/NOTICE.txt
 delete mode 100644 ruby/red-arrow-cuda/README.md
 delete mode 100644 ruby/red-arrow-cuda/Rakefile
 delete mode 100644 ruby/red-arrow-cuda/dependency-check/Rakefile
 delete mode 100644 ruby/red-arrow-cuda/lib/arrow-cuda.rb
 delete mode 100644 ruby/red-arrow-cuda/lib/arrow-cuda/device-manager.rb
 delete mode 100644 ruby/red-arrow-cuda/lib/arrow-cuda/loader.rb
 delete mode 100644 ruby/red-arrow-cuda/lib/arrow-cuda/version.rb
 delete mode 100644 ruby/red-arrow-cuda/red-arrow-cuda.gemspec
 delete mode 100644 ruby/red-arrow-cuda/test/helper.rb
 delete mode 100755 ruby/red-arrow-cuda/test/run-test.rb
 delete mode 100644 ruby/red-arrow-cuda/test/test-cuda.rb
 delete mode 100644 ruby/red-arrow-dataset/.gitignore
 delete mode 100644 ruby/red-arrow-dataset/Gemfile
 delete mode 100644 ruby/red-arrow-dataset/LICENSE.txt
 delete mode 100644 ruby/red-arrow-dataset/NOTICE.txt
 delete mode 100644 ruby/red-arrow-dataset/README.md
 delete mode 100644 ruby/red-arrow-dataset/Rakefile
 delete mode 100644 ruby/red-arrow-dataset/dependency-check/Rakefile
 delete mode 100644 ruby/red-arrow-dataset/lib/arrow-dataset.rb
 delete mode 100644 ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-fragment.rb
 delete mode 100644 ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-scan-task.rb
 delete mode 100644 ruby/red-arrow-dataset/lib/arrow-dataset/loader.rb
 delete mode 100644 ruby/red-arrow-dataset/lib/arrow-dataset/scan-options.rb
 delete mode 100644 ruby/red-arrow-dataset/lib/arrow-dataset/version.rb
 delete mode 100644 ruby/red-arrow-dataset/red-arrow-dataset.gemspec
 delete mode 100644 ruby/red-arrow-dataset/test/helper.rb
 delete mode 100755 ruby/red-arrow-dataset/test/run-test.rb
 delete mode 100644 ruby/red-arrow-dataset/test/test-in-memory-scan-task.rb
 delete mode 100644 ruby/red-arrow-dataset/test/test-scan-options.rb
 delete mode 100644 ruby/red-arrow/.gitignore
 delete mode 100644 ruby/red-arrow/.yardopts
 delete mode 100644 ruby/red-arrow/Gemfile
 delete mode 100644 ruby/red-arrow/LICENSE.txt
 delete mode 100644 ruby/red-arrow/NOTICE.txt
 delete mode 100644 ruby/red-arrow/README.md
 delete mode 100644 ruby/red-arrow/Rakefile
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/boolean.yml
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/decimal128.yml
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/dictionary.yml
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/int64.yml
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/list.yml
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/string.yml
 delete mode 100644 ruby/red-arrow/benchmark/raw-records/timestamp.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/boolean.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/decimal128.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/dictionary.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/int64.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/list.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/string.yml
 delete mode 100644 ruby/red-arrow/benchmark/values/timestamp.yml
 delete mode 100644 ruby/red-arrow/doc/text/development.md
 delete mode 100755 ruby/red-arrow/example/read-file.rb
 delete mode 100755 ruby/red-arrow/example/read-stream.rb
 delete mode 100755 ruby/red-arrow/example/write-file.rb
 delete mode 100755 ruby/red-arrow/example/write-stream.rb
 delete mode 100644 ruby/red-arrow/ext/arrow/arrow.cpp
 delete mode 100644 ruby/red-arrow/ext/arrow/converters.cpp
 delete mode 100644 ruby/red-arrow/ext/arrow/converters.hpp
 delete mode 100644 ruby/red-arrow/ext/arrow/extconf.rb
 delete mode 100644 ruby/red-arrow/ext/arrow/raw-records.cpp
 delete mode 100644 ruby/red-arrow/ext/arrow/red-arrow.hpp
 delete mode 100644 ruby/red-arrow/ext/arrow/values.cpp
 delete mode 100644 ruby/red-arrow/image/red-arrow.png
 delete mode 100644 ruby/red-arrow/lib/arrow.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/bigdecimal-extension.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/block-closable.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/buffer.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/chunked-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/column-containable.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/column.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/compression-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/csv-loader.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/csv-read-options.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/date32-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/date32-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/date64-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/date64-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal128-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal128-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal128-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal128.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal256-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal256-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal256-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/decimal256.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/dense-union-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/dictionary-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/dictionary-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/field-containable.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/field.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/file-output-stream.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/fixed-size-binary-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/fixed-size-binary-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/generic-filterable.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/generic-takeable.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/group.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/list-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/list-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/loader.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/null-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/null-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/path-extension.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/raw-table-converter.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record-batch-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record-batch-file-reader.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record-batch-iterator.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record-batch-stream-reader.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record-batch.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record-containable.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/record.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/rolling-window.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/schema.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/slicer.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/sort-key.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/sort-options.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/sparse-union-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/struct-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/struct-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/struct-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/table-formatter.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/table-list-formatter.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/table-loader.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/table-saver.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/table-table-formatter.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/table.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/tensor.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time32-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time32-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time32-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time64-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time64-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/time64-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/timestamp-array-builder.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/timestamp-array.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/timestamp-data-type.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/version.rb
 delete mode 100644 ruby/red-arrow/lib/arrow/writable.rb
 delete mode 100644 ruby/red-arrow/red-arrow.gemspec
 delete mode 100644 ruby/red-arrow/test/fixture/TestOrcFile.test1.orc
 delete mode 100644 ruby/red-arrow/test/fixture/float-integer.csv
 delete mode 100644 ruby/red-arrow/test/fixture/integer-float.csv
 delete mode 100644 ruby/red-arrow/test/fixture/null-with-double-quote.csv
 delete mode 100644 ruby/red-arrow/test/fixture/null-without-double-quote.csv
 delete mode 100644 ruby/red-arrow/test/fixture/with-header-float.csv
 delete mode 100644 ruby/red-arrow/test/fixture/with-header.csv
 delete mode 100644 ruby/red-arrow/test/fixture/without-header-float.csv
 delete mode 100644 ruby/red-arrow/test/fixture/without-header.csv
 delete mode 100644 ruby/red-arrow/test/helper.rb
 delete mode 100644 ruby/red-arrow/test/helper/fixture.rb
 delete mode 100644 ruby/red-arrow/test/helper/omittable.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-basic-arrays.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-dense-union-array.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-list-array.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-multiple-columns.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-sparse-union-array.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-struct-array.rb
 delete mode 100644 ruby/red-arrow/test/raw-records/test-table.rb
 delete mode 100755 ruby/red-arrow/test/run-test.rb
 delete mode 100644 ruby/red-arrow/test/test-array-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-array.rb
 delete mode 100644 ruby/red-arrow/test/test-bigdecimal.rb
 delete mode 100644 ruby/red-arrow/test/test-buffer.rb
 delete mode 100644 ruby/red-arrow/test/test-chunked-array.rb
 delete mode 100644 ruby/red-arrow/test/test-column.rb
 delete mode 100644 ruby/red-arrow/test/test-csv-loader.rb
 delete mode 100644 ruby/red-arrow/test/test-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-date32-array.rb
 delete mode 100644 ruby/red-arrow/test/test-date64-array.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal128-array-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal128-array.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal128-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal128.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal256-array-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal256-array.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal256-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-decimal256.rb
 delete mode 100644 ruby/red-arrow/test/test-dense-union-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-dictionary-array.rb
 delete mode 100644 ruby/red-arrow/test/test-dictionary-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-feather.rb
 delete mode 100644 ruby/red-arrow/test/test-field.rb
 delete mode 100644 ruby/red-arrow/test/test-file-output-stream.rb
 delete mode 100644 ruby/red-arrow/test/test-fixed-size-binary-array-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-fixed-size-binary-array.rb
 delete mode 100644 ruby/red-arrow/test/test-group.rb
 delete mode 100644 ruby/red-arrow/test/test-list-array-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-list-array.rb
 delete mode 100644 ruby/red-arrow/test/test-list-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-null-array.rb
 delete mode 100644 ruby/red-arrow/test/test-orc.rb
 delete mode 100644 ruby/red-arrow/test/test-record-batch-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-record-batch-file-reader.rb
 delete mode 100644 ruby/red-arrow/test/test-record-batch-iterator.rb
 delete mode 100644 ruby/red-arrow/test/test-record-batch.rb
 delete mode 100644 ruby/red-arrow/test/test-rolling-window.rb
 delete mode 100644 ruby/red-arrow/test/test-schema.rb
 delete mode 100644 ruby/red-arrow/test/test-slicer.rb
 delete mode 100644 ruby/red-arrow/test/test-sort-indices.rb
 delete mode 100644 ruby/red-arrow/test/test-sort-key.rb
 delete mode 100644 ruby/red-arrow/test/test-sort-options.rb
 delete mode 100644 ruby/red-arrow/test/test-sparse-union-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-struct-array-builder.rb
 delete mode 100644 ruby/red-arrow/test/test-struct-array.rb
 delete mode 100644 ruby/red-arrow/test/test-struct-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-table.rb
 delete mode 100644 ruby/red-arrow/test/test-tensor.rb
 delete mode 100644 ruby/red-arrow/test/test-time.rb
 delete mode 100644 ruby/red-arrow/test/test-time32-array.rb
 delete mode 100644 ruby/red-arrow/test/test-time32-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-time64-array.rb
 delete mode 100644 ruby/red-arrow/test/test-time64-data-type.rb
 delete mode 100644 ruby/red-arrow/test/test-timestamp-array.rb
 delete mode 100644 ruby/red-arrow/test/test-timestamp-data-type.rb
 delete mode 100644 ruby/red-arrow/test/values/test-basic-arrays.rb
 delete mode 100644 ruby/red-arrow/test/values/test-dense-union-array.rb
 delete mode 100644 ruby/red-arrow/test/values/test-list-array.rb
 delete mode 100644 ruby/red-arrow/test/values/test-sparse-union-array.rb
 delete mode 100644 ruby/red-arrow/test/values/test-struct-array.rb
 delete mode 100644 ruby/red-gandiva/.gitignore
 delete mode 100644 ruby/red-gandiva/Gemfile
 delete mode 100644 ruby/red-gandiva/LICENSE.txt
 delete mode 100644 ruby/red-gandiva/NOTICE.txt
 delete mode 100644 ruby/red-gandiva/README.md
 delete mode 100644 ruby/red-gandiva/Rakefile
 delete mode 100644 ruby/red-gandiva/dependency-check/Rakefile
 delete mode 100644 ruby/red-gandiva/lib/gandiva.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/arrow-schema.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/add.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/binary-operation.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/context.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/divide.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/elsif.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/equal.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/field.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/greater-than.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/if.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/less-than.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/literal.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/multiply.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/record.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/subtract.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/expression-builder/value.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/loader.rb
 delete mode 100644 ruby/red-gandiva/lib/gandiva/version.rb
 delete mode 100644 ruby/red-gandiva/red-gandiva.gemspec
 delete mode 100644 ruby/red-gandiva/test/expression-builder/test-add.rb
 delete mode 100644 ruby/red-gandiva/test/expression-builder/test-record.rb
 delete mode 100644 ruby/red-gandiva/test/helper.rb
 delete mode 100755 ruby/red-gandiva/test/run-test.rb
 delete mode 100644 ruby/red-gandiva/test/test-boolean-literal-node.rb
 delete mode 100644 ruby/red-gandiva/test/test-projector.rb
 delete mode 100644 ruby/red-parquet/.gitignore
 delete mode 100644 ruby/red-parquet/Gemfile
 delete mode 100644 ruby/red-parquet/LICENSE.txt
 delete mode 100644 ruby/red-parquet/NOTICE.txt
 delete mode 100644 ruby/red-parquet/README.md
 delete mode 100644 ruby/red-parquet/Rakefile
 delete mode 100644 ruby/red-parquet/dependency-check/Rakefile
 delete mode 100644 ruby/red-parquet/lib/parquet.rb
 delete mode 100644 ruby/red-parquet/lib/parquet/arrow-table-loadable.rb
 delete mode 100644 ruby/red-parquet/lib/parquet/arrow-table-savable.rb
 delete mode 100644 ruby/red-parquet/lib/parquet/loader.rb
 delete mode 100644 ruby/red-parquet/lib/parquet/version.rb
 delete mode 100644 ruby/red-parquet/lib/parquet/writer-properties.rb
 delete mode 100644 ruby/red-parquet/red-parquet.gemspec
 delete mode 100644 ruby/red-parquet/test/helper.rb
 delete mode 100755 ruby/red-parquet/test/run-test.rb
 delete mode 100644 ruby/red-parquet/test/test-arrow-table.rb
 delete mode 100644 ruby/red-plasma/.gitignore
 delete mode 100644 ruby/red-plasma/Gemfile
 delete mode 100644 ruby/red-plasma/LICENSE.txt
 delete mode 100644 ruby/red-plasma/NOTICE.txt
 delete mode 100644 ruby/red-plasma/README.md
 delete mode 100644 ruby/red-plasma/Rakefile
 delete mode 100644 ruby/red-plasma/dependency-check/Rakefile
 delete mode 100644 ruby/red-plasma/lib/plasma.rb
 delete mode 100644 ruby/red-plasma/lib/plasma/client.rb
 delete mode 100644 ruby/red-plasma/lib/plasma/loader.rb
 delete mode 100644 ruby/red-plasma/lib/plasma/version.rb
 delete mode 100644 ruby/red-plasma/red-plasma.gemspec
 delete mode 100644 ruby/red-plasma/test/helper.rb
 delete mode 100644 ruby/red-plasma/test/helper/omittable.rb
 delete mode 100644 ruby/red-plasma/test/helper/plasma-store.rb
 delete mode 100755 ruby/red-plasma/test/run-test.rb
 delete mode 100644 ruby/red-plasma/test/test-plasma-client.rb
 delete mode 100644 rust/ballista/.dockerignore
 delete mode 100644 rust/ballista/README.md
 delete mode 100755 rust/ballista/dev/build-rust-base.sh
 delete mode 100755 rust/ballista/dev/build-rust.sh
 delete mode 100755 rust/ballista/dev/integration-tests.sh
 delete mode 100644 rust/ballista/docker/README.md
 delete mode 100644 rust/ballista/docker/rust-base.dockerfile
 delete mode 100644 rust/ballista/docker/rust.dockerfile
 delete mode 100644 rust/ballista/docs/README.md
 delete mode 100644 rust/ballista/docs/architecture.md
 delete mode 100644 rust/ballista/docs/dev-env-rust.md
 delete mode 100644 rust/ballista/docs/images/query-execution.png
 delete mode 100644 rust/ballista/docs/integration-testing.md
 delete mode 100644 rust/ballista/docs/release-process.md
 delete mode 100644 rust/ballista/docs/rust-docker.md
 delete mode 100644 rust/ballista/docs/user-guide/.gitignore
 delete mode 100644 rust/ballista/docs/user-guide/README.md
 delete mode 100644 rust/ballista/docs/user-guide/book.toml
 delete mode 100644 rust/ballista/docs/user-guide/src/SUMMARY.md
 delete mode 100644 rust/ballista/docs/user-guide/src/client-rust.md
 delete mode 100644 rust/ballista/docs/user-guide/src/clients.md
 delete mode 100644 rust/ballista/docs/user-guide/src/configuration.md
 delete mode 100644 rust/ballista/docs/user-guide/src/deployment.md
 delete mode 100644 rust/ballista/docs/user-guide/src/docker-compose.md
 delete mode 100644 rust/ballista/docs/user-guide/src/faq.md
 delete mode 100644 rust/ballista/docs/user-guide/src/img/ballista-architecture.png
 delete mode 100644 rust/ballista/docs/user-guide/src/introduction.md
 delete mode 100644 rust/ballista/docs/user-guide/src/kubernetes.md
 delete mode 100644 rust/ballista/docs/user-guide/src/standalone.md
 delete mode 100644 rust/ballista/rust/.dockerignore
 delete mode 100644 rust/ballista/rust/.gitignore
 delete mode 100644 rust/ballista/rust/Cargo.toml
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/.dockerignore
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/.gitignore
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/Cargo.toml
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/README.md
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/docker-compose.yaml
 delete mode 100755 rust/ballista/rust/benchmarks/tpch/entrypoint.sh
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q1.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q10.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q11.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q12.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q13.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q14.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q16.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q17.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q18.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q19.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q2.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q20.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q21.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q22.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q3.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q4.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q5.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q6.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q7.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q8.sql
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/queries/q9.sql
 delete mode 100755 rust/ballista/rust/benchmarks/tpch/run.sh
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/src/main.rs
 delete mode 100755 rust/ballista/rust/benchmarks/tpch/tpch-gen.sh
 delete mode 100644 rust/ballista/rust/benchmarks/tpch/tpchgen.dockerfile
 delete mode 100644 rust/ballista/rust/client/Cargo.toml
 delete mode 100644 rust/ballista/rust/client/README.md
 delete mode 100644 rust/ballista/rust/client/src/columnar_batch.rs
 delete mode 100644 rust/ballista/rust/client/src/context.rs
 delete mode 100644 rust/ballista/rust/client/src/lib.rs
 delete mode 100644 rust/ballista/rust/client/src/prelude.rs
 delete mode 100644 rust/ballista/rust/core/Cargo.toml
 delete mode 100644 rust/ballista/rust/core/README.md
 delete mode 100644 rust/ballista/rust/core/build.rs
 delete mode 100644 rust/ballista/rust/core/proto/ballista.proto
 delete mode 100644 rust/ballista/rust/core/src/client.rs
 delete mode 100644 rust/ballista/rust/core/src/datasource.rs
 delete mode 100644 rust/ballista/rust/core/src/error.rs
 delete mode 100644 rust/ballista/rust/core/src/execution_plans/mod.rs
 delete mode 100644 rust/ballista/rust/core/src/execution_plans/query_stage.rs
 delete mode 100644 rust/ballista/rust/core/src/execution_plans/shuffle_reader.rs
 delete mode 100644 rust/ballista/rust/core/src/execution_plans/unresolved_shuffle.rs
 delete mode 100644 rust/ballista/rust/core/src/lib.rs
 delete mode 100644 rust/ballista/rust/core/src/memory_stream.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/logical_plan/from_proto.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/logical_plan/mod.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/logical_plan/to_proto.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/mod.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/physical_plan/from_proto.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/physical_plan/mod.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/physical_plan/to_proto.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/scheduler/from_proto.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/scheduler/mod.rs
 delete mode 100644 rust/ballista/rust/core/src/serde/scheduler/to_proto.rs
 delete mode 100644 rust/ballista/rust/core/src/utils.rs
 delete mode 100644 rust/ballista/rust/executor/Cargo.toml
 delete mode 100644 rust/ballista/rust/executor/README.md
 delete mode 100644 rust/ballista/rust/executor/build.rs
 delete mode 100644 rust/ballista/rust/executor/examples/example_executor_config.toml
 delete mode 100644 rust/ballista/rust/executor/executor_config_spec.toml
 delete mode 100644 rust/ballista/rust/executor/src/collect.rs
 delete mode 100644 rust/ballista/rust/executor/src/execution_loop.rs
 delete mode 100644 rust/ballista/rust/executor/src/flight_service.rs
 delete mode 100644 rust/ballista/rust/executor/src/lib.rs
 delete mode 100644 rust/ballista/rust/executor/src/main.rs
 delete mode 100644 rust/ballista/rust/scheduler/Cargo.toml
 delete mode 100644 rust/ballista/rust/scheduler/README.md
 delete mode 100644 rust/ballista/rust/scheduler/build.rs
 delete mode 100644 rust/ballista/rust/scheduler/scheduler_config_spec.toml
 delete mode 100644 rust/ballista/rust/scheduler/src/api/handlers.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/api/mod.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/lib.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/main.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/planner.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/state/etcd.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/state/mod.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/state/standalone.rs
 delete mode 100644 rust/ballista/rust/scheduler/src/test_utils.rs
 delete mode 100644 rust/ballista/rust/scheduler/testdata/customer/customer.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/lineitem/partition0.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/lineitem/partition1.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/nation/nation.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/orders/orders.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/part/part.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/partsupp/partsupp.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/region/region.tbl
 delete mode 100644 rust/ballista/rust/scheduler/testdata/supplier/supplier.tbl
 delete mode 100644 rust/ballista/ui/scheduler/.gitignore
 delete mode 100644 rust/ballista/ui/scheduler/README.md
 delete mode 100644 rust/ballista/ui/scheduler/index.d.ts
 delete mode 100644 rust/ballista/ui/scheduler/package.json
 delete mode 100644 rust/ballista/ui/scheduler/public/favicon.ico
 delete mode 100644 rust/ballista/ui/scheduler/public/index.html
 delete mode 100644 rust/ballista/ui/scheduler/public/logo192.png
 delete mode 100644 rust/ballista/ui/scheduler/public/logo512.png
 delete mode 100644 rust/ballista/ui/scheduler/public/manifest.json
 delete mode 100644 rust/ballista/ui/scheduler/public/robots.txt
 delete mode 100644 rust/ballista/ui/scheduler/react-table-config.d.ts
 delete mode 100644 rust/ballista/ui/scheduler/src/App.css
 delete mode 100644 rust/ballista/ui/scheduler/src/App.test.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/App.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/DataTable.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/Empty.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/Footer.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/Header.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/NodesList.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/QueriesList.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/Summary.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/components/logo.svg
 delete mode 100644 rust/ballista/ui/scheduler/src/index.css
 delete mode 100644 rust/ballista/ui/scheduler/src/index.tsx
 delete mode 100644 rust/ballista/ui/scheduler/src/react-app-env.d.ts
 delete mode 100644 rust/ballista/ui/scheduler/src/reportWebVitals.ts
 delete mode 100644 rust/ballista/ui/scheduler/src/setupTests.ts
 delete mode 100644 rust/ballista/ui/scheduler/tsconfig.json
 delete mode 100644 rust/ballista/ui/scheduler/yarn.lock
 delete mode 100644 rust/benchmarks/Cargo.toml
 delete mode 100644 rust/benchmarks/README.md
 delete mode 100644 rust/benchmarks/src/bin/nyctaxi.rs
 delete mode 100644 rust/benchmarks/src/bin/tpch.rs
 delete mode 100644 rust/datafusion-examples/Cargo.toml
 delete mode 100644 rust/datafusion-examples/examples/README.md
 delete mode 100644 rust/datafusion-examples/examples/csv_sql.rs
 delete mode 100644 rust/datafusion-examples/examples/dataframe.rs
 delete mode 100644 rust/datafusion-examples/examples/dataframe_in_memory.rs
 delete mode 100644 rust/datafusion-examples/examples/flight_client.rs
 delete mode 100644 rust/datafusion-examples/examples/flight_server.rs
 delete mode 100644 rust/datafusion-examples/examples/parquet_sql.rs
 delete mode 100644 rust/datafusion-examples/examples/simple_udaf.rs
 delete mode 100644 rust/datafusion-examples/examples/simple_udf.rs
 delete mode 100644 rust/datafusion/Cargo.toml
 delete mode 100644 rust/datafusion/DEVELOPERS.md
 delete mode 100644 rust/datafusion/Dockerfile
 delete mode 100644 rust/datafusion/README.md
 delete mode 100644 rust/datafusion/benches/aggregate_query_sql.rs
 delete mode 100644 rust/datafusion/benches/filter_query_sql.rs
 delete mode 100644 rust/datafusion/benches/math_query_sql.rs
 delete mode 100644 rust/datafusion/benches/scalar.rs
 delete mode 100644 rust/datafusion/benches/sort_limit_query_sql.rs
 delete mode 100644 rust/datafusion/docs/cli.md
 delete mode 100644 rust/datafusion/docs/images/DataFusion-Logo-Dark.png
 delete mode 100644 rust/datafusion/docs/images/DataFusion-Logo-Dark.svg
 delete mode 100644 rust/datafusion/docs/images/DataFusion-Logo-Light.png
 delete mode 100644 rust/datafusion/docs/images/DataFusion-Logo-Light.svg
 delete mode 100644 rust/datafusion/src/bin/main.rs
 delete mode 100644 rust/datafusion/src/bin/repl.rs
 delete mode 100644 rust/datafusion/src/catalog/catalog.rs
 delete mode 100644 rust/datafusion/src/catalog/information_schema.rs
 delete mode 100644 rust/datafusion/src/catalog/mod.rs
 delete mode 100644 rust/datafusion/src/catalog/schema.rs
 delete mode 100644 rust/datafusion/src/dataframe.rs
 delete mode 100644 rust/datafusion/src/datasource/csv.rs
 delete mode 100644 rust/datafusion/src/datasource/datasource.rs
 delete mode 100644 rust/datafusion/src/datasource/empty.rs
 delete mode 100644 rust/datafusion/src/datasource/memory.rs
 delete mode 100644 rust/datafusion/src/datasource/mod.rs
 delete mode 100644 rust/datafusion/src/datasource/parquet.rs
 delete mode 100644 rust/datafusion/src/error.rs
 delete mode 100644 rust/datafusion/src/execution/context.rs
 delete mode 100644 rust/datafusion/src/execution/dataframe_impl.rs
 delete mode 100644 rust/datafusion/src/execution/mod.rs
 delete mode 100644 rust/datafusion/src/lib.rs
 delete mode 100644 rust/datafusion/src/logical_plan/builder.rs
 delete mode 100644 rust/datafusion/src/logical_plan/dfschema.rs
 delete mode 100644 rust/datafusion/src/logical_plan/display.rs
 delete mode 100644 rust/datafusion/src/logical_plan/expr.rs
 delete mode 100644 rust/datafusion/src/logical_plan/extension.rs
 delete mode 100644 rust/datafusion/src/logical_plan/mod.rs
 delete mode 100644 rust/datafusion/src/logical_plan/operators.rs
 delete mode 100644 rust/datafusion/src/logical_plan/plan.rs
 delete mode 100644 rust/datafusion/src/logical_plan/registry.rs
 delete mode 100644 rust/datafusion/src/optimizer/constant_folding.rs
 delete mode 100644 rust/datafusion/src/optimizer/filter_push_down.rs
 delete mode 100644 rust/datafusion/src/optimizer/hash_build_probe_order.rs
 delete mode 100644 rust/datafusion/src/optimizer/limit_push_down.rs
 delete mode 100644 rust/datafusion/src/optimizer/mod.rs
 delete mode 100644 rust/datafusion/src/optimizer/optimizer.rs
 delete mode 100644 rust/datafusion/src/optimizer/projection_push_down.rs
 delete mode 100644 rust/datafusion/src/optimizer/utils.rs
 delete mode 100644 rust/datafusion/src/physical_optimizer/coalesce_batches.rs
 delete mode 100644 rust/datafusion/src/physical_optimizer/merge_exec.rs
 delete mode 100644 rust/datafusion/src/physical_optimizer/mod.rs
 delete mode 100644 rust/datafusion/src/physical_optimizer/optimizer.rs
 delete mode 100644 rust/datafusion/src/physical_optimizer/repartition.rs
 delete mode 100644 rust/datafusion/src/physical_plan/aggregates.rs
 delete mode 100644 rust/datafusion/src/physical_plan/array_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/coalesce_batches.rs
 delete mode 100644 rust/datafusion/src/physical_plan/common.rs
 delete mode 100644 rust/datafusion/src/physical_plan/crypto_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/csv.rs
 delete mode 100644 rust/datafusion/src/physical_plan/datetime_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/distinct_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/empty.rs
 delete mode 100644 rust/datafusion/src/physical_plan/explain.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/average.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/binary.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/case.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/cast.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/coercion.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/column.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/count.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/in_list.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/is_not_null.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/is_null.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/literal.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/min_max.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/mod.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/negative.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/not.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/nullif.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/sum.rs
 delete mode 100644 rust/datafusion/src/physical_plan/expressions/try_cast.rs
 delete mode 100644 rust/datafusion/src/physical_plan/filter.rs
 delete mode 100644 rust/datafusion/src/physical_plan/functions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/group_scalar.rs
 delete mode 100644 rust/datafusion/src/physical_plan/hash_aggregate.rs
 delete mode 100644 rust/datafusion/src/physical_plan/hash_join.rs
 delete mode 100644 rust/datafusion/src/physical_plan/hash_utils.rs
 delete mode 100644 rust/datafusion/src/physical_plan/limit.rs
 delete mode 100644 rust/datafusion/src/physical_plan/math_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/memory.rs
 delete mode 100644 rust/datafusion/src/physical_plan/merge.rs
 delete mode 100644 rust/datafusion/src/physical_plan/mod.rs
 delete mode 100644 rust/datafusion/src/physical_plan/parquet.rs
 delete mode 100644 rust/datafusion/src/physical_plan/planner.rs
 delete mode 100644 rust/datafusion/src/physical_plan/projection.rs
 delete mode 100644 rust/datafusion/src/physical_plan/regex_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/repartition.rs
 delete mode 100644 rust/datafusion/src/physical_plan/sort.rs
 delete mode 100644 rust/datafusion/src/physical_plan/string_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/type_coercion.rs
 delete mode 100644 rust/datafusion/src/physical_plan/udaf.rs
 delete mode 100644 rust/datafusion/src/physical_plan/udf.rs
 delete mode 100644 rust/datafusion/src/physical_plan/unicode_expressions.rs
 delete mode 100644 rust/datafusion/src/physical_plan/union.rs
 delete mode 100644 rust/datafusion/src/prelude.rs
 delete mode 100644 rust/datafusion/src/scalar.rs
 delete mode 100644 rust/datafusion/src/sql/mod.rs
 delete mode 100644 rust/datafusion/src/sql/parser.rs
 delete mode 100644 rust/datafusion/src/sql/planner.rs
 delete mode 100644 rust/datafusion/src/sql/utils.rs
 delete mode 100644 rust/datafusion/src/test/exec.rs
 delete mode 100644 rust/datafusion/src/test/mod.rs
 delete mode 100644 rust/datafusion/src/test/user_defined.rs
 delete mode 100644 rust/datafusion/src/test/variable.rs
 delete mode 100644 rust/datafusion/src/variable/mod.rs
 delete mode 100644 rust/datafusion/tests/aggregate_simple.csv
 delete mode 100644 rust/datafusion/tests/custom_sources.rs
 delete mode 100644 rust/datafusion/tests/customer.csv
 delete mode 100644 rust/datafusion/tests/dataframe.rs
 delete mode 100644 rust/datafusion/tests/example.csv
 delete mode 100644 rust/datafusion/tests/provider_filter_pushdown.rs
 delete mode 100644 rust/datafusion/tests/sql.rs
 delete mode 100644 rust/datafusion/tests/user_defined_plan.rs

[arrow-rs] 08/14: Removed go.

This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 8d9f27fd6c5b8ddced4e39d3b54a525b33691541
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:21:03 2021 +0000

    Removed go.
---
 go/README.md                                       |   124 -
 go/arrow/.editorconfig                             |    21 -
 go/arrow/.gitignore                                |    35 -
 go/arrow/Gopkg.lock                                |    44 -
 go/arrow/Gopkg.toml                                |    23 -
 go/arrow/LICENSE.txt                               |  1987 ----
 go/arrow/Makefile                                  |    54 -
 go/arrow/_examples/helloworld/main.go              |    32 -
 go/arrow/_tools/tmpl/main.go                       |   267 -
 go/arrow/_tools/tmpl/main_test.go                  |    73 -
 go/arrow/array/array.go                            |   208 -
 go/arrow/array/array_test.go                       |   301 -
 go/arrow/array/binary.go                           |   134 -
 go/arrow/array/binary_test.go                      |   430 -
 go/arrow/array/binarybuilder.go                    |   217 -
 go/arrow/array/binarybuilder_test.go               |    87 -
 go/arrow/array/boolean.go                          |    95 -
 go/arrow/array/boolean_test.go                     |   288 -
 go/arrow/array/booleanbuilder.go                   |   165 -
 go/arrow/array/booleanbuilder_test.go              |    90 -
 go/arrow/array/bufferbuilder.go                    |   127 -
 go/arrow/array/bufferbuilder_byte.go               |    30 -
 go/arrow/array/bufferbuilder_numeric.gen.go        |    58 -
 go/arrow/array/bufferbuilder_numeric.gen.go.tmpl   |    61 -
 go/arrow/array/bufferbuilder_numeric_test.go       |   106 -
 go/arrow/array/builder.go                          |   289 -
 go/arrow/array/builder_test.go                     |    83 -
 go/arrow/array/compare.go                          |   474 -
 go/arrow/array/compare_test.go                     |   531 -
 go/arrow/array/data.go                             |   179 -
 go/arrow/array/data_test.go                        |    51 -
 go/arrow/array/decimal128.go                       |   235 -
 go/arrow/array/decimal128_test.go                  |   179 -
 go/arrow/array/doc.go                              |    20 -
 go/arrow/array/fixed_size_list.go                  |   240 -
 go/arrow/array/fixed_size_list_test.go             |   215 -
 go/arrow/array/fixedsize_binary.go                 |    95 -
 go/arrow/array/fixedsize_binary_test.go            |   111 -
 go/arrow/array/fixedsize_binarybuilder.go          |   154 -
 go/arrow/array/fixedsize_binarybuilder_test.go     |   107 -
 go/arrow/array/float16.go                          |    87 -
 go/arrow/array/float16_builder.go                  |   165 -
 go/arrow/array/float16_builder_test.go             |   119 -
 go/arrow/array/interval.go                         |   434 -
 go/arrow/array/interval_test.go                    |   276 -
 go/arrow/array/list.go                             |   269 -
 go/arrow/array/list_test.go                        |   213 -
 go/arrow/array/null.go                             |   140 -
 go/arrow/array/null_test.go                        |    77 -
 go/arrow/array/numeric.gen.go                      |  1098 --
 go/arrow/array/numeric.gen.go.tmpl                 |    95 -
 go/arrow/array/numeric_test.go                     |   616 --
 go/arrow/array/numericbuilder.gen.go               |  2227 ----
 go/arrow/array/numericbuilder.gen.go.tmpl          |   182 -
 go/arrow/array/numericbuilder.gen_test.go          |  2700 -----
 go/arrow/array/numericbuilder.gen_test.go.tmpl     |   216 -
 go/arrow/array/record.go                           |   345 -
 go/arrow/array/record_test.go                      |   709 --
 go/arrow/array/string.go                           |   205 -
 go/arrow/array/string_test.go                      |   183 -
 go/arrow/array/struct.go                           |   278 -
 go/arrow/array/struct_test.go                      |   409 -
 go/arrow/array/table.go                            |   455 -
 go/arrow/array/table_test.go                       |   747 --
 go/arrow/array/util.go                             |    24 -
 go/arrow/arrio/arrio.go                            |    91 -
 go/arrow/arrio/arrio_test.go                       |   205 -
 go/arrow/bitutil/bitutil.go                        |   159 -
 go/arrow/bitutil/bitutil_test.go                   |   287 -
 go/arrow/compare.go                                |    79 -
 go/arrow/compare_test.go                           |   286 -
 go/arrow/csv/common.go                             |   174 -
 go/arrow/csv/reader.go                             |   531 -
 go/arrow/csv/reader_test.go                        |   604 -
 go/arrow/csv/testdata/header.csv                   |    21 -
 go/arrow/csv/testdata/simple.csv                   |    28 -
 go/arrow/csv/testdata/types.csv                    |    21 -
 go/arrow/csv/writer.go                             |   218 -
 go/arrow/csv/writer_test.go                        |   274 -
 go/arrow/datatype.go                               |   143 -
 go/arrow/datatype_binary.go                        |    41 -
 go/arrow/datatype_binary_test.go                   |    53 -
 go/arrow/datatype_fixedwidth.go                    |   213 -
 go/arrow/datatype_fixedwidth_test.go               |   297 -
 go/arrow/datatype_nested.go                        |   180 -
 go/arrow/datatype_nested_test.go                   |   356 -
 go/arrow/datatype_null.go                          |    29 -
 go/arrow/datatype_null_test.go                     |    38 -
 go/arrow/datatype_numeric.gen.go                   |   134 -
 go/arrow/datatype_numeric.gen.go.tmpl              |    40 -
 go/arrow/datatype_numeric.gen.go.tmpldata          |    66 -
 go/arrow/decimal128/decimal128.go                  |    73 -
 go/arrow/decimal128/decimal128_test.go             |    94 -
 go/arrow/doc.go                                    |    39 -
 go/arrow/endian/big.go                             |    25 -
 go/arrow/endian/little.go                          |    25 -
 go/arrow/example_test.go                           |   595 -
 go/arrow/flight/Flight.pb.go                       |  1473 ---
 go/arrow/flight/Flight_grpc.pb.go                  |   877 --
 go/arrow/flight/basic_auth_flight_test.go          |   205 -
 go/arrow/flight/client.go                          |   129 -
 go/arrow/flight/client_auth.go                     |    91 -
 go/arrow/flight/example_flight_server_test.go      |    86 -
 go/arrow/flight/flight_test.go                     |   313 -
 go/arrow/flight/gen.go                             |    19 -
 go/arrow/flight/record_batch_reader.go             |    87 -
 go/arrow/flight/record_batch_writer.go             |    72 -
 go/arrow/flight/server.go                          |   118 -
 go/arrow/flight/server_auth.go                     |   229 -
 go/arrow/float16/float16.go                        |    70 -
 go/arrow/float16/float16_test.go                   |    45 -
 go/arrow/gen-flatbuffers.go                        |   122 -
 go/arrow/go.mod                                    |    38 -
 go/arrow/go.sum                                    |   110 -
 go/arrow/internal/arrdata/arrdata.go               |  1189 --
 go/arrow/internal/arrdata/ioutil.go                |   274 -
 go/arrow/internal/arrjson/arrjson.go               |  1501 ---
 go/arrow/internal/arrjson/arrjson_test.go          |  3104 ------
 go/arrow/internal/arrjson/option.go                |    57 -
 go/arrow/internal/arrjson/reader.go                |   100 -
 go/arrow/internal/arrjson/writer.go                |   116 -
 go/arrow/internal/cpu/README.md                    |    42 -
 go/arrow/internal/cpu/cpu.go                       |    77 -
 go/arrow/internal/cpu/cpu_s390x.go                 |     7 -
 go/arrow/internal/cpu/cpu_test.go                  |    51 -
 go/arrow/internal/cpu/cpu_x86.go                   |   107 -
 go/arrow/internal/cpu/cpu_x86.s                    |    32 -
 go/arrow/internal/debug/assert_off.go              |    24 -
 go/arrow/internal/debug/assert_on.go               |    28 -
 go/arrow/internal/debug/doc.go                     |    32 -
 go/arrow/internal/debug/log_off.go                 |    21 -
 go/arrow/internal/debug/log_on.go                  |    32 -
 go/arrow/internal/debug/util.go                    |    37 -
 go/arrow/internal/flatbuf/Binary.go                |    51 -
 go/arrow/internal/flatbuf/Block.go                 |    74 -
 go/arrow/internal/flatbuf/BodyCompression.go       |    87 -
 go/arrow/internal/flatbuf/BodyCompressionMethod.go |    52 -
 go/arrow/internal/flatbuf/Bool.go                  |    50 -
 go/arrow/internal/flatbuf/Buffer.go                |    73 -
 go/arrow/internal/flatbuf/CompressionType.go       |    45 -
 go/arrow/internal/flatbuf/Date.go                  |    71 -
 go/arrow/internal/flatbuf/DateUnit.go              |    45 -
 go/arrow/internal/flatbuf/Decimal.go               |   107 -
 go/arrow/internal/flatbuf/DictionaryBatch.go       |   108 -
 go/arrow/internal/flatbuf/DictionaryEncoding.go    |   135 -
 go/arrow/internal/flatbuf/DictionaryKind.go        |    47 -
 go/arrow/internal/flatbuf/Duration.go              |    65 -
 go/arrow/internal/flatbuf/Endianness.go            |    47 -
 go/arrow/internal/flatbuf/Feature.go               |    71 -
 go/arrow/internal/flatbuf/Field.go                 |   188 -
 go/arrow/internal/flatbuf/FieldNode.go             |    76 -
 go/arrow/internal/flatbuf/FixedSizeBinary.go       |    67 -
 go/arrow/internal/flatbuf/FixedSizeList.go         |    67 -
 go/arrow/internal/flatbuf/FloatingPoint.go         |    65 -
 go/arrow/internal/flatbuf/Footer.go                |   162 -
 go/arrow/internal/flatbuf/Int.go                   |    80 -
 go/arrow/internal/flatbuf/Interval.go              |    65 -
 go/arrow/internal/flatbuf/IntervalUnit.go          |    45 -
 go/arrow/internal/flatbuf/KeyValue.go              |    75 -
 go/arrow/internal/flatbuf/LargeBinary.go           |    52 -
 go/arrow/internal/flatbuf/LargeList.go             |    52 -
 go/arrow/internal/flatbuf/LargeUtf8.go             |    52 -
 go/arrow/internal/flatbuf/List.go                  |    50 -
 go/arrow/internal/flatbuf/Map.go                   |    92 -
 go/arrow/internal/flatbuf/Message.go               |   133 -
 go/arrow/internal/flatbuf/MessageHeader.go         |    65 -
 go/arrow/internal/flatbuf/MetadataVersion.go       |    65 -
 go/arrow/internal/flatbuf/Null.go                  |    51 -
 go/arrow/internal/flatbuf/Precision.go             |    48 -
 go/arrow/internal/flatbuf/RecordBatch.go           |   154 -
 go/arrow/internal/flatbuf/Schema.go                |   159 -
 .../internal/flatbuf/SparseMatrixCompressedAxis.go |    45 -
 go/arrow/internal/flatbuf/SparseMatrixIndexCSR.go  |   181 -
 go/arrow/internal/flatbuf/SparseMatrixIndexCSX.go  |   200 -
 go/arrow/internal/flatbuf/SparseTensor.go          |   175 -
 go/arrow/internal/flatbuf/SparseTensorIndex.go     |    51 -
 go/arrow/internal/flatbuf/SparseTensorIndexCOO.go  |   179 -
 go/arrow/internal/flatbuf/SparseTensorIndexCSF.go  |   291 -
 go/arrow/internal/flatbuf/Struct_.go               |    53 -
 go/arrow/internal/flatbuf/Tensor.go                |   163 -
 go/arrow/internal/flatbuf/TensorDim.go             |    83 -
 go/arrow/internal/flatbuf/Time.go                  |    83 -
 go/arrow/internal/flatbuf/TimeUnit.go              |    51 -
 go/arrow/internal/flatbuf/Timestamp.go             |   122 -
 go/arrow/internal/flatbuf/Type.go                  |   108 -
 go/arrow/internal/flatbuf/Union.go                 |   101 -
 go/arrow/internal/flatbuf/UnionMode.go             |    45 -
 go/arrow/internal/flatbuf/Utf8.go                  |    51 -
 go/arrow/internal/testing/tools/bits.go            |    40 -
 go/arrow/internal/testing/tools/bits_test.go       |    42 -
 go/arrow/internal/testing/tools/bool.go            |    25 -
 go/arrow/ipc/cmd/arrow-cat/main.go                 |   216 -
 go/arrow/ipc/cmd/arrow-cat/main_test.go            |   582 -
 go/arrow/ipc/cmd/arrow-file-to-stream/main.go      |    83 -
 go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go |    73 -
 .../ipc/cmd/arrow-json-integration-test/main.go    |   226 -
 .../cmd/arrow-json-integration-test/main_test.go   |    94 -
 go/arrow/ipc/cmd/arrow-ls/main.go                  |   201 -
 go/arrow/ipc/cmd/arrow-ls/main_test.go             |   341 -
 go/arrow/ipc/cmd/arrow-stream-to-file/main.go      |    71 -
 go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go |    82 -
 go/arrow/ipc/compression.go                        |   109 -
 go/arrow/ipc/dict.go                               |    85 -
 go/arrow/ipc/dict_test.go                          |   196 -
 go/arrow/ipc/file_reader.go                        |   615 --
 go/arrow/ipc/file_test.go                          |    83 -
 go/arrow/ipc/file_writer.go                        |   376 -
 go/arrow/ipc/ipc.go                                |   144 -
 go/arrow/ipc/message.go                            |   241 -
 go/arrow/ipc/metadata.go                           |  1073 --
 go/arrow/ipc/metadata_test.go                      |   159 -
 go/arrow/ipc/reader.go                             |   209 -
 go/arrow/ipc/stream_test.go                        |   111 -
 go/arrow/ipc/writer.go                             |   565 -
 go/arrow/math/Makefile                             |    90 -
 go/arrow/math/_lib/.gitignore                      |    18 -
 go/arrow/math/_lib/CMakeLists.txt                  |    22 -
 go/arrow/math/_lib/arch.h                          |    27 -
 go/arrow/math/_lib/float64.c                       |    26 -
 go/arrow/math/_lib/float64_avx2.s                  |   176 -
 go/arrow/math/_lib/float64_sse4.s                  |   103 -
 go/arrow/math/_lib/int64.c                         |    27 -
 go/arrow/math/_lib/int64_avx2.s                    |   181 -
 go/arrow/math/_lib/int64_sse4.s                    |   108 -
 go/arrow/math/_lib/uint64.c                        |    27 -
 go/arrow/math/_lib/uint64_avx2.s                   |   181 -
 go/arrow/math/_lib/uint64_sse4.s                   |   108 -
 go/arrow/math/doc.go                               |    30 -
 go/arrow/math/float64.go                           |    47 -
 go/arrow/math/float64.tmpldata                     |     4 -
 go/arrow/math/float64_amd64.go                     |    33 -
 go/arrow/math/float64_avx2_amd64.go                |    41 -
 go/arrow/math/float64_avx2_amd64.s                 |   167 -
 go/arrow/math/float64_noasm.go                     |    25 -
 go/arrow/math/float64_s390x.go                     |    25 -
 go/arrow/math/float64_sse4_amd64.go                |    41 -
 go/arrow/math/float64_sse4_amd64.s                 |    94 -
 go/arrow/math/float64_test.go                      |    86 -
 go/arrow/math/int64.go                             |    47 -
 go/arrow/math/int64.tmpldata                       |     4 -
 go/arrow/math/int64_amd64.go                       |    33 -
 go/arrow/math/int64_avx2_amd64.go                  |    41 -
 go/arrow/math/int64_avx2_amd64.s                   |   173 -
 go/arrow/math/int64_noasm.go                       |    25 -
 go/arrow/math/int64_s390x.go                       |    25 -
 go/arrow/math/int64_sse4_amd64.go                  |    41 -
 go/arrow/math/int64_sse4_amd64.s                   |   100 -
 go/arrow/math/int64_test.go                        |    86 -
 go/arrow/math/math_amd64.go                        |    51 -
 go/arrow/math/math_noasm.go                        |    29 -
 go/arrow/math/math_s390x.go                        |    29 -
 go/arrow/math/type.go.tmpl                         |    48 -
 go/arrow/math/type_amd64.go.tmpl                   |    33 -
 go/arrow/math/type_noasm.go.tmpl                   |    25 -
 go/arrow/math/type_s390x.go.tmpl                   |    25 -
 go/arrow/math/type_simd_amd64.go.tmpl              |    42 -
 go/arrow/math/type_test.go.tmpl                    |    87 -
 go/arrow/math/uint64.go                            |    47 -
 go/arrow/math/uint64.tmpldata                      |     4 -
 go/arrow/math/uint64_amd64.go                      |    33 -
 go/arrow/math/uint64_avx2_amd64.go                 |    41 -
 go/arrow/math/uint64_avx2_amd64.s                  |   173 -
 go/arrow/math/uint64_noasm.go                      |    25 -
 go/arrow/math/uint64_s390x.go                      |    25 -
 go/arrow/math/uint64_sse4_amd64.go                 |    41 -
 go/arrow/math/uint64_sse4_amd64.s                  |   100 -
 go/arrow/math/uint64_test.go                       |    86 -
 go/arrow/memory/Makefile                           |    54 -
 go/arrow/memory/_lib/.gitignore                    |    18 -
 go/arrow/memory/_lib/CMakeLists.txt                |    22 -
 go/arrow/memory/_lib/arch.h                        |    27 -
 go/arrow/memory/_lib/memory.c                      |    27 -
 go/arrow/memory/_lib/memory_avx2.s                 |    97 -
 go/arrow/memory/_lib/memory_sse4.s                 |    96 -
 go/arrow/memory/allocator.go                       |    33 -
 go/arrow/memory/buffer.go                          |   125 -
 go/arrow/memory/buffer_test.go                     |    57 -
 go/arrow/memory/checked_allocator.go               |    74 -
 go/arrow/memory/doc.go                             |    20 -
 go/arrow/memory/go_allocator.go                    |    48 -
 go/arrow/memory/go_allocator_test.go               |    76 -
 go/arrow/memory/memory.go                          |    33 -
 go/arrow/memory/memory_amd64.go                    |    33 -
 go/arrow/memory/memory_avx2_amd64.go               |    41 -
 go/arrow/memory/memory_avx2_amd64.s                |    85 -
 go/arrow/memory/memory_js_wasm.go                  |    23 -
 go/arrow/memory/memory_noasm.go                    |    23 -
 go/arrow/memory/memory_sse4_amd64.go               |    31 -
 go/arrow/memory/memory_sse4_amd64.s                |    84 -
 go/arrow/memory/memory_test.go                     |   125 -
 go/arrow/memory/util.go                            |    37 -
 go/arrow/memory/util_test.go                       |    61 -
 go/arrow/numeric.schema.json                       |    15 -
 go/arrow/numeric.tmpldata                          |   141 -
 go/arrow/schema.go                                 |   193 -
 go/arrow/schema_test.go                            |   363 -
 go/arrow/tensor/numeric.gen.go                     |   327 -
 go/arrow/tensor/numeric.gen.go.tmpl                |    55 -
 go/arrow/tensor/numeric.gen_test.go                |  1170 --
 go/arrow/tensor/numeric.gen_test.go.tmpl           |   126 -
 go/arrow/tensor/tensor.go                          |   247 -
 go/arrow/tensor/tensor_test.go                     |   166 -
 go/arrow/type_string.go                            |    53 -
 go/arrow/type_traits_boolean.go                    |    28 -
 go/arrow/type_traits_decimal128.go                 |    75 -
 go/arrow/type_traits_float16.go                    |    74 -
 go/arrow/type_traits_interval.go                   |   126 -
 go/arrow/type_traits_numeric.gen.go                |   814 --
 go/arrow/type_traits_numeric.gen.go.tmpl           |    95 -
 go/arrow/type_traits_numeric.gen_test.go           |   570 -
 go/arrow/type_traits_numeric.gen_test.go.tmpl      |    61 -
 go/arrow/type_traits_test.go                       |   201 -
 go/parquet/.gitignore                              |    31 -
 go/parquet/LICENSE.txt                             |  1987 ----
 go/parquet/compress/brotli.go                      |   115 -
 go/parquet/compress/compress.go                    |   156 -
 go/parquet/compress/compress_test.go               |   138 -
 go/parquet/compress/gzip.go                        |    98 -
 go/parquet/compress/snappy.go                      |    62 -
 go/parquet/compress/zstd.go                        |   112 -
 go/parquet/doc.go                                  |    68 -
 go/parquet/encryption_properties.go                |   711 --
 go/parquet/encryption_properties_test.go           |   217 -
 go/parquet/go.mod                                  |    35 -
 go/parquet/go.sum                                  |   155 -
 go/parquet/internal/bmi/Makefile                   |    47 -
 go/parquet/internal/bmi/_lib/bitmap_bmi2.c         |    30 -
 go/parquet/internal/bmi/_lib/bitmap_bmi2.s         |   140 -
 go/parquet/internal/bmi/bitmap_bmi2.go             |    48 -
 go/parquet/internal/bmi/bitmap_bmi2.s              |   117 -
 go/parquet/internal/bmi/bmi_init.go                |    60 -
 go/parquet/internal/bmi/bmi_noasm.go               |   249 -
 go/parquet/internal/debug/assert_off.go            |    24 -
 go/parquet/internal/debug/assert_on.go             |    28 -
 go/parquet/internal/debug/doc.go                   |    23 -
 go/parquet/internal/encryption/aes.go              |   264 -
 go/parquet/internal/encryption/decryptor.go        |   261 -
 go/parquet/internal/encryption/encryptor.go        |   237 -
 go/parquet/internal/encryption/key_handling.go     |    62 -
 .../gen-go/parquet/GoUnusedProtection__.go         |     6 -
 .../internal/gen-go/parquet/parquet-consts.go      |    23 -
 go/parquet/internal/gen-go/parquet/parquet.go      | 10961 -------------------
 .../internal/gen-go/parquet/staticcheck.conf       |    17 -
 go/parquet/internal/testutils/random.go            |   452 -
 go/parquet/internal/testutils/random_arrow.go      |   488 -
 go/parquet/internal/thrift/helpers.go              |    87 -
 go/parquet/internal/utils/Makefile                 |    72 -
 go/parquet/internal/utils/_lib/arch.h              |    27 -
 go/parquet/internal/utils/_lib/bit_packing_avx2.c  |  1879 ----
 go/parquet/internal/utils/_lib/bit_packing_avx2.s  |  4012 -------
 go/parquet/internal/utils/_lib/min_max.c           |    73 -
 go/parquet/internal/utils/_lib/min_max_avx2.s      |  1366 ---
 go/parquet/internal/utils/_lib/min_max_sse4.s      |   613 --
 go/parquet/internal/utils/_lib/unpack_bool.c       |    30 -
 go/parquet/internal/utils/_lib/unpack_bool_avx2.s  |  6293 -----------
 go/parquet/internal/utils/_lib/unpack_bool_sse4.s  |   104 -
 go/parquet/internal/utils/bit_benchmark_test.go    |   220 -
 go/parquet/internal/utils/bit_block_counter.go     |   263 -
 .../internal/utils/bit_block_counter_test.go       |   201 -
 go/parquet/internal/utils/bit_packing.go           |    35 -
 go/parquet/internal/utils/bit_packing_avx2.go      |    53 -
 go/parquet/internal/utils/bit_packing_avx2.s       |  3439 ------
 go/parquet/internal/utils/bit_packing_default.go   |  1941 ----
 go/parquet/internal/utils/bit_packing_noasm.go     |    23 -
 go/parquet/internal/utils/bit_reader.go            |   348 -
 go/parquet/internal/utils/bit_reader_test.go       |   619 --
 go/parquet/internal/utils/bit_run_reader.go        |   148 -
 go/parquet/internal/utils/bit_run_reader_test.go   |   158 -
 go/parquet/internal/utils/bit_set_run_reader.go    |   345 -
 .../internal/utils/bit_set_run_reader_test.go      |   276 -
 go/parquet/internal/utils/bit_writer.go            |   182 -
 go/parquet/internal/utils/bitmap_reader.go         |    72 -
 go/parquet/internal/utils/bitmap_reader_test.go    |    75 -
 go/parquet/internal/utils/bitmap_writer.go         |   277 -
 go/parquet/internal/utils/bitmap_writer_test.go    |   304 -
 go/parquet/internal/utils/clib_amd64.s             |    87 -
 go/parquet/internal/utils/dictionary.go            |    87 -
 go/parquet/internal/utils/math.go                  |    49 -
 go/parquet/internal/utils/min_max.go               |   120 -
 go/parquet/internal/utils/min_max_amd64.go         |    43 -
 go/parquet/internal/utils/min_max_avx2.go          |    58 -
 go/parquet/internal/utils/min_max_avx2.s           |  1352 ---
 go/parquet/internal/utils/min_max_noasm.go         |    27 -
 go/parquet/internal/utils/min_max_sse4.go          |    56 -
 go/parquet/internal/utils/min_max_sse4.s           |   592 -
 go/parquet/internal/utils/physical_types.tmpldata  |    52 -
 go/parquet/internal/utils/rle.go                   |   583 -
 go/parquet/internal/utils/typed_rle_dict.gen.go    |  1375 ---
 .../internal/utils/typed_rle_dict.gen.go.tmpl      |   218 -
 go/parquet/internal/utils/unpack_bool.go           |    26 -
 go/parquet/internal/utils/unpack_bool_amd64.go     |    41 -
 go/parquet/internal/utils/unpack_bool_avx2.go      |    29 -
 go/parquet/internal/utils/unpack_bool_avx2.s       |  6961 ------------
 go/parquet/internal/utils/unpack_bool_noasm.go     |    25 -
 go/parquet/internal/utils/unpack_bool_sse4.go      |    29 -
 go/parquet/internal/utils/unpack_bool_sse4.s       |    88 -
 go/parquet/internal/utils/write_utils.go           |    57 -
 go/parquet/reader_properties.go                    |    79 -
 go/parquet/reader_writer_properties_test.go        |    69 -
 go/parquet/tools.go                                |    25 -
 go/parquet/types.go                                |   354 -
 go/parquet/writer_properties.go                    |   510 -
 402 files changed, 111043 deletions(-)

diff --git a/go/README.md b/go/README.md
deleted file mode 100644
index 6bf8438..0000000
--- a/go/README.md
+++ /dev/null
@@ -1,124 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-Apache Arrow for Go
-===================
-
-[![GoDoc](https://godoc.org/github.com/apache/arrow/go/arrow?status.svg)](https://godoc.org/github.com/apache/arrow/go/arrow)
-
-[Apache Arrow][arrow] is a cross-language development platform for in-memory
-data. It specifies a standardized language-independent columnar memory format
-for flat and hierarchical data, organized for efficient analytic operations on
-modern hardware. It also provides computational libraries and zero-copy
-streaming messaging and inter-process communication.
-
-
-Reference Counting
-------------------
-
-The library makes use of reference counting so that it can track when memory
-buffers are no longer used. This allows Arrow to update resource accounting,
-pool memory, and track overall memory usage as objects are created and
-released. Types expose two methods to deal with this pattern. The `Retain`
-method increases the reference count by 1 and the `Release` method reduces it
-by 1. Once the reference count of an object reaches zero, any associated
-resources are freed. `Retain` and `Release` are safe to call from multiple
-goroutines.
-
-### When to call `Retain` / `Release`?
-
-* If you are passed an object and wish to take ownership of it, you must call
-  `Retain`. You must later pair this with a call to `Release` when you no
-  longer need the object.  "Taking ownership" typically means you wish to
-  access the object outside the scope of the current function call.
-
-* You own any object you create via functions whose names begin with `New` or
-  `Copy`, as well as any object you receive over a channel. Therefore you must
-  call `Release` once you no longer need the object.
-
-* If you send an object over a channel, you must call `Retain` before sending
-  it, as the receiver is assumed to own the object and will later call
-  `Release` when it no longer needs the object (see the sketch after this
-  list).
-
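-A minimal sketch of these rules, assuming the `array` and `memory` packages of
-this module (constructors beginning with `New` hand ownership to the caller,
-so each is paired with a `Release`):
-
-```go
-package main
-
-import (
-	"fmt"
-
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func main() {
-	pool := memory.NewGoAllocator()
-
-	// The builder comes from a New* constructor, so we own it and must
-	// eventually call Release.
-	bldr := array.NewInt64Builder(pool)
-	defer bldr.Release()
-
-	bldr.AppendValues([]int64{1, 2, 3, 4}, nil)
-
-	// NewInt64Array also begins with New: we own the resulting array too.
-	arr := bldr.NewInt64Array()
-	defer arr.Release()
-
-	fmt.Println(arr)
-}
-```
-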
-Performance
------------
-
-The arrow package makes extensive use of [c2goasm][] to leverage LLVM's
-advanced optimizer and generate PLAN9 assembly functions from C/C++ code. The
-arrow package can be compiled without these optimizations using the `noasm`
-build tag. Alternatively, an environment variable can be used to select at
-runtime which architecture optimizations are enabled. See the `cpu` package
-[README](arrow/internal/cpu/README.md) for a description of this environment
-variable.
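-
-For example, a build that skips the generated assembly entirely might look
-like this (a sketch using the `noasm` build tag named above):
-
-```sh
-$ go build -tags noasm ./...
-$ go test -tags noasm -bench=8192 -run=. ./math
-```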
-
-### Example Usage
-
-The following benchmarks demonstrate summing an array of 8192 values using
-various optimizations.
-
-With no architecture optimizations disabled (thus using AVX2):
-
-```sh
-$ INTEL_DISABLE_EXT=NONE go test -bench=8192 -run=. ./math
-goos: darwin
-goarch: amd64
-pkg: github.com/apache/arrow/go/arrow/math
-BenchmarkFloat64Funcs_Sum_8192-8   	 2000000	       687 ns/op	95375.41 MB/s
-BenchmarkInt64Funcs_Sum_8192-8     	 2000000	       719 ns/op	91061.06 MB/s
-BenchmarkUint64Funcs_Sum_8192-8    	 2000000	       691 ns/op	94797.29 MB/s
-PASS
-ok  	github.com/apache/arrow/go/arrow/math	6.444s
-```
-
-**NOTE:** `NONE` is simply ignored, thus enabling both the AVX2 and SSE4 optimizations.
-
-----
-
-Disable AVX2 architecture optimizations:
-
-```sh
-$ INTEL_DISABLE_EXT=AVX2 go test -bench=8192 -run=. ./math
-goos: darwin
-goarch: amd64
-pkg: github.com/apache/arrow/go/arrow/math
-BenchmarkFloat64Funcs_Sum_8192-8   	 1000000	      1912 ns/op	34263.63 MB/s
-BenchmarkInt64Funcs_Sum_8192-8     	 1000000	      1392 ns/op	47065.57 MB/s
-BenchmarkUint64Funcs_Sum_8192-8    	 1000000	      1405 ns/op	46636.41 MB/s
-PASS
-ok  	github.com/apache/arrow/go/arrow/math	4.786s
-```
-
-----
-
-Disable ALL architecture optimizations, thus using the pure Go implementation:
-
-```sh
-$ INTEL_DISABLE_EXT=ALL go test -bench=8192 -run=. ./math
-goos: darwin
-goarch: amd64
-pkg: github.com/apache/arrow/go/arrow/math
-BenchmarkFloat64Funcs_Sum_8192-8   	  200000	     10285 ns/op	6371.41 MB/s
-BenchmarkInt64Funcs_Sum_8192-8     	  500000	      3892 ns/op	16837.37 MB/s
-BenchmarkUint64Funcs_Sum_8192-8    	  500000	      3929 ns/op	16680.00 MB/s
-PASS
-ok  	github.com/apache/arrow/go/arrow/math	6.179s
-```
-
-[arrow]:    https://arrow.apache.org
-[c2goasm]:  https://github.com/minio/c2goasm
diff --git a/go/arrow/.editorconfig b/go/arrow/.editorconfig
deleted file mode 100644
index a7ceaf9..0000000
--- a/go/arrow/.editorconfig
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-root = true
-
-[*.tmpl]
-indent_style = tab
-indent_size = 4
\ No newline at end of file
diff --git a/go/arrow/.gitignore b/go/arrow/.gitignore
deleted file mode 100644
index d4b831a..0000000
--- a/go/arrow/.gitignore
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-### Go template
-# Binaries for programs and plugins
-*.exe
-*.dll
-*.so
-*.dylib
-*.o
-
-# Test binary, build with `go test -c`
-*.test
-
-# Output of the go coverage tool, specifically when used with LiteIDE
-*.out
-
-# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
-.glide/
-
-bin/
-vendor/
\ No newline at end of file
diff --git a/go/arrow/Gopkg.lock b/go/arrow/Gopkg.lock
deleted file mode 100644
index 143e4f9..0000000
--- a/go/arrow/Gopkg.lock
+++ /dev/null
@@ -1,44 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
-  digest = "1:56c130d885a4aacae1dd9c7b71cfe39912c7ebc1ff7d2b46083c8812996dc43b"
-  name = "github.com/davecgh/go-spew"
-  packages = ["spew"]
-  pruneopts = ""
-  revision = "346938d642f2ec3594ed81d874461961cd0faa76"
-  version = "v1.1.0"
-
-[[projects]]
-  digest = "1:1d7e1867c49a6dd9856598ef7c3123604ea3daabf5b83f303ff457bcbc410b1d"
-  name = "github.com/pkg/errors"
-  packages = ["."]
-  pruneopts = ""
-  revision = "ba968bfe8b2f7e042a574c888954fccecfa385b4"
-  version = "v0.8.1"
-
-[[projects]]
-  digest = "1:256484dbbcd271f9ecebc6795b2df8cad4c458dd0f5fd82a8c2fa0c29f233411"
-  name = "github.com/pmezard/go-difflib"
-  packages = ["difflib"]
-  pruneopts = ""
-  revision = "792786c7400a136282c1664665ae0a8db921c6c2"
-  version = "v1.0.0"
-
-[[projects]]
-  digest = "1:2d0dc026c4aef5e2f3a0e06a4dabe268b840d8f63190cf6894e02134a03f52c5"
-  name = "github.com/stretchr/testify"
-  packages = ["assert"]
-  pruneopts = ""
-  revision = "b91bfb9ebec76498946beb6af7c0230c7cc7ba6c"
-  version = "v1.2.0"
-
-[solve-meta]
-  analyzer-name = "dep"
-  analyzer-version = 1
-  input-imports = [
-    "github.com/pkg/errors",
-    "github.com/stretchr/testify/assert",
-  ]
-  solver-name = "gps-cdcl"
-  solver-version = 1
diff --git a/go/arrow/Gopkg.toml b/go/arrow/Gopkg.toml
deleted file mode 100644
index b27807d..0000000
--- a/go/arrow/Gopkg.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-[[constraint]]
-  name = "github.com/stretchr/testify"
-  version = "1.2.0"
-
-[[constraint]]
-  name = "github.com/pkg/errors"
-  version = "0.8.1"
\ No newline at end of file
diff --git a/go/arrow/LICENSE.txt b/go/arrow/LICENSE.txt
deleted file mode 100644
index 6884e08..0000000
--- a/go/arrow/LICENSE.txt
+++ /dev/null
@@ -1,1987 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
---------------------------------------------------------------------------------
-
-src/plasma/fling.cc and src/plasma/fling.h: Apache 2.0
-
-Copyright 2013 Sharvil Nanavati
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-src/plasma/thirdparty/ae: Modified / 3-Clause BSD
-
-Copyright (c) 2006-2010, Salvatore Sanfilippo <antirez at gmail dot com>
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice,
-   this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
- * Neither the name of Redis nor the names of its contributors may be used
-   to endorse or promote products derived from this software without
-   specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-src/plasma/thirdparty/dlmalloc.c: CC0
-
-This is a version (aka dlmalloc) of malloc/free/realloc written by
-Doug Lea and released to the public domain, as explained at
-http://creativecommons.org/publicdomain/zero/1.0/ Send questions,
-comments, complaints, performance data, etc to dl@cs.oswego.edu
-
---------------------------------------------------------------------------------
-
-src/plasma/common.cc (some portions)
-
-Copyright (c) Austin Appleby (aappleby (AT) gmail)
-
-Some portions of this file are derived from code in the MurmurHash project
-
-All code is released to the public domain. For business purposes, Murmurhash is
-under the MIT license.
-
-https://sites.google.com/site/murmurhash/
-
---------------------------------------------------------------------------------
-
-src/arrow/util (some portions): Apache 2.0, and 3-clause BSD
-
-Some portions of this module are derived from code in the Chromium project,
-copyright (c) Google inc and (c) The Chromium Authors and licensed under the
-Apache 2.0 License or the under the 3-clause BSD license:
-
-  Copyright (c) 2013 The Chromium Authors. All rights reserved.
-
-  Redistribution and use in source and binary forms, with or without
-  modification, are permitted provided that the following conditions are
-  met:
-
-     * Redistributions of source code must retain the above copyright
-  notice, this list of conditions and the following disclaimer.
-     * Redistributions in binary form must reproduce the above
-  copyright notice, this list of conditions and the following disclaimer
-  in the documentation and/or other materials provided with the
-  distribution.
-     * Neither the name of Google Inc. nor the names of its
-  contributors may be used to endorse or promote products derived from
-  this software without specific prior written permission.
-
-  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-This project includes code from Daniel Lemire's FrameOfReference project.
-
-https://github.com/lemire/FrameOfReference/blob/6ccaf9e97160f9a3b299e23a8ef739e711ef0c71/src/bpacking.cpp
-
-Copyright: 2013 Daniel Lemire
-Home page: http://lemire.me/en/
-Project page: https://github.com/lemire/FrameOfReference
-License: Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
-This project includes code from the TensorFlow project
-
-Copyright 2015 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-This project includes code from the NumPy project.
-
-https://github.com/numpy/numpy/blob/e1f191c46f2eebd6cb892a4bfe14d9dd43a06c4e/numpy/core/src/multiarray/multiarraymodule.c#L2910
-
-https://github.com/numpy/numpy/blob/68fd82271b9ea5a9e50d4e761061dfcca851382a/numpy/core/src/multiarray/datetime.c
-
-Copyright (c) 2005-2017, NumPy Developers.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-       notice, this list of conditions and the following disclaimer.
-
-    * Redistributions in binary form must reproduce the above
-       copyright notice, this list of conditions and the following
-       disclaimer in the documentation and/or other materials provided
-       with the distribution.
-
-    * Neither the name of the NumPy Developers nor the names of any
-       contributors may be used to endorse or promote products derived
-       from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-This project includes code from the Boost project
-
-Boost Software License - Version 1.0 - August 17th, 2003
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-This project includes code from the FlatBuffers project
-
-Copyright 2014 Google Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-This project includes code from the tslib project
-
-Copyright 2015 Microsoft Corporation. All rights reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-This project includes code from the jemalloc project
-
-https://github.com/jemalloc/jemalloc
-
-Copyright (C) 2002-2017 Jason Evans <ja...@canonware.com>.
-All rights reserved.
-Copyright (C) 2007-2012 Mozilla Foundation.  All rights reserved.
-Copyright (C) 2009-2017 Facebook, Inc.  All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-1. Redistributions of source code must retain the above copyright notice(s),
-   this list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright notice(s),
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS
-OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO
-EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------------
-
-This project includes code from the Go project, BSD 3-clause license + PATENTS
-weak patent termination clause
-(https://github.com/golang/go/blob/master/PATENTS).
-
-Copyright (c) 2009 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-This project includes code from the hs2client
-
-https://github.com/cloudera/hs2client
-
-Copyright 2016 Cloudera Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-The script ci/scripts/util_wait_for_it.sh has the following license
-
-Copyright (c) 2016 Giles Hall
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
---------------------------------------------------------------------------------
-
-The script r/configure has the following license (MIT)
-
-Copyright (c) 2017, Jeroen Ooms and Jim Hester
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
---------------------------------------------------------------------------------
-
-cpp/src/arrow/util/logging.cc, cpp/src/arrow/util/logging.h and
-cpp/src/arrow/util/logging-test.cc are adapted from
-the Ray Project (https://github.com/ray-project/ray) (Apache 2.0).
-
-Copyright (c) 2016 Ray Project (https://github.com/ray-project/ray)
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-The files cpp/src/arrow/vendored/datetime/date.h, cpp/src/arrow/vendored/datetime/tz.h,
-cpp/src/arrow/vendored/datetime/tz_private.h, cpp/src/arrow/vendored/datetime/ios.h,
-cpp/src/arrow/vendored/datetime/ios.mm,
-cpp/src/arrow/vendored/datetime/tz.cpp are adapted from
-Howard Hinnant's date library (https://github.com/HowardHinnant/date).
-It is licensed under the MIT license.
-
-The MIT License (MIT)
-Copyright (c) 2015, 2016, 2017 Howard Hinnant
-Copyright (c) 2016 Adrian Colomitchi
-Copyright (c) 2017 Florian Dang
-Copyright (c) 2017 Paul Thompson
-Copyright (c) 2018 Tomasz Kamiński
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
---------------------------------------------------------------------------------
-
-The file cpp/src/arrow/util/utf8.h includes code adapted from the page
-  https://bjoern.hoehrmann.de/utf-8/decoder/dfa/
-with the following license (MIT)
-
-Copyright (c) 2008-2009 Bjoern Hoehrmann <bj...@hoehrmann.de>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
---------------------------------------------------------------------------------
-
-The file cpp/src/arrow/vendored/string_view.hpp has the following license
-
-Boost Software License - Version 1.0 - August 17th, 2003
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-The files in cpp/src/arrow/vendored/xxhash/ have the following license
-(BSD 2-Clause License)
-
-xxHash Library
-Copyright (c) 2012-2014, Yann Collet
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
-  list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice, this
-  list of conditions and the following disclaimer in the documentation and/or
-  other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-You can contact the author at:
-- xxHash homepage: http://www.xxhash.com
-- xxHash source repository: https://github.com/Cyan4973/xxHash
-
---------------------------------------------------------------------------------
-
-The files in cpp/src/arrow/vendored/double-conversion/ have the following license
-(BSD 3-Clause License)
-
-Copyright 2006-2011, the V8 project authors. All rights reserved.
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-      copyright notice, this list of conditions and the following
-      disclaimer in the documentation and/or other materials provided
-      with the distribution.
-    * Neither the name of Google Inc. nor the names of its
-      contributors may be used to endorse or promote products derived
-      from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-The files in cpp/src/arrow/vendored/uriparser/ have the following license
-(BSD 3-Clause License)
-
-uriparser - RFC 3986 URI parsing library
-
-Copyright (C) 2007, Weijia Song <so...@gmail.com>
-Copyright (C) 2007, Sebastian Pipping <se...@pipping.org>
-All rights reserved.
-
-Redistribution  and use in source and binary forms, with or without
-modification,  are permitted provided that the following conditions
-are met:
-
-    * Redistributions   of  source  code  must  retain  the   above
-      copyright  notice, this list of conditions and the  following
-      disclaimer.
-
-    * Redistributions  in  binary  form must  reproduce  the  above
-      copyright  notice, this list of conditions and the  following
-      disclaimer   in  the  documentation  and/or  other  materials
-      provided with the distribution.
-
-    * Neither  the name of the <ORGANIZATION> nor the names of  its
-      contributors  may  be  used to endorse  or  promote  products
-      derived  from  this software without specific  prior  written
-      permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS  IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT  NOT
-LIMITED  TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND  FITNESS
-FOR  A  PARTICULAR  PURPOSE ARE DISCLAIMED. IN NO EVENT  SHALL  THE
-COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL,    SPECIAL,   EXEMPLARY,   OR   CONSEQUENTIAL   DAMAGES
-(INCLUDING,  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES;  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-STRICT  LIABILITY,  OR  TORT (INCLUDING  NEGLIGENCE  OR  OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-The files under dev/tasks/conda-recipes have the following license
-
-BSD 3-clause license
-Copyright (c) 2015-2018, conda-forge
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its contributors
-   may be used to endorse or promote products derived from this software without
-   specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
-THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-The files in cpp/src/arrow/vendored/utf8cpp/ have the following license
-
-Copyright 2006 Nemanja Trifunovic
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-This project includes code from Apache Kudu.
-
- * cpp/cmake_modules/CompilerInfo.cmake is based on Kudu's cmake_modules/CompilerInfo.cmake
-
-Copyright: 2016 The Apache Software Foundation.
-Home page: https://kudu.apache.org/
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
-This project includes code from Apache Impala (incubating), formerly
-Impala. The Impala code and rights were donated to the ASF as part of the
-Incubator process after the initial code imports into Apache Parquet.
-
-Copyright: 2012 Cloudera, Inc.
-Copyright: 2016 The Apache Software Foundation.
-Home page: http://impala.apache.org/
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
-This project includes code from Apache Aurora.
-
-* dev/release/{release,changelog,release-candidate} are based on the scripts from
-  Apache Aurora
-
-Copyright: 2016 The Apache Software Foundation.
-Home page: https://aurora.apache.org/
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
-This project includes code from the Google styleguide.
-
-* cpp/build-support/cpplint.py is based on the scripts from the Google styleguide.
-
-Copyright: 2009 Google Inc. All rights reserved.
-Homepage: https://github.com/google/styleguide
-License: 3-clause BSD
-
---------------------------------------------------------------------------------
-
-This project includes code from Snappy.
-
-* cpp/cmake_modules/{SnappyCMakeLists.txt,SnappyConfig.h} are based on code
-  from Google's Snappy project.
-
-Copyright: 2009 Google Inc. All rights reserved.
-Homepage: https://github.com/google/snappy
-License: 3-clause BSD
-
---------------------------------------------------------------------------------
-
-This project includes code from the manylinux project.
-
-* python/manylinux1/scripts/{build_python.sh,python-tag-abi-tag.py,
-  requirements.txt} are based on code from the manylinux project.
-
-Copyright: 2016 manylinux
-Homepage: https://github.com/pypa/manylinux
-License: The MIT License (MIT)
-
---------------------------------------------------------------------------------
-
-This project includes code from the cymove project:
-
-* python/pyarrow/includes/common.pxd includes code from the cymove project
-
-The MIT License (MIT)
-Copyright (c) 2019 Omer Ozarslan
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
-OR OTHER DEALINGS IN THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-This project includes code from the Ursabot project under the dev/archery
-directory.
-
-License: BSD 2-Clause
-
-Copyright 2019 RStudio, Inc.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-This project includes code from CMake.
-
-* cpp/cmake_modules/FindGTest.cmake is based on code from CMake.
-
-Copyright: Copyright 2000-2019 Kitware, Inc. and Contributors
-Homepage: https://gitlab.kitware.com/cmake/cmake
-License: 3-clause BSD
-
---------------------------------------------------------------------------------
-
-This project includes code from mingw-w64.
-
-* cpp/src/arrow/util/cpu-info.cc has a polyfill for mingw-w64 < 5
-
-Copyright (c) 2009 - 2013 by the mingw-w64 project
-Homepage: https://mingw-w64.org
-License: Zope Public License (ZPL) Version 2.1.
-
---------------------------------------------------------------------------------
-
-This project includes code from Google's Asylo project.
-
-* cpp/src/arrow/result.h is based on status_or.h
-
-Copyright (c) 2017 Asylo authors
-Homepage: https://asylo.dev/
-License: Apache 2.0
-
---------------------------------------------------------------------------------
-
-This project includes code from Google's protobuf project.
-
-* ARROW_ASSIGN_OR_RAISE in cpp/src/arrow/result.h is based on ASSIGN_OR_RETURN
-
-Copyright 2008 Google Inc.  All rights reserved.
-Homepage: https://developers.google.com/protocol-buffers/
-License:
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-    * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-Code generated by the Protocol Buffer compiler is owned by the owner
-of the input file used when generating it.  This code is not
-standalone and requires a support library to be linked with it.  This
-support library is itself covered by the above license.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency LLVM is statically linked in certain binary distributions.
-Additionally some sections of source code have been derived from sources in LLVM
-and have been clearly labeled as such. LLVM has the following license:
-
-==============================================================================
-LLVM Release License
-==============================================================================
-University of Illinois/NCSA
-Open Source License
-
-Copyright (c) 2003-2018 University of Illinois at Urbana-Champaign.
-All rights reserved.
-
-Developed by:
-
-    LLVM Team
-
-    University of Illinois at Urbana-Champaign
-
-    http://llvm.org
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal with
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-    * Redistributions of source code must retain the above copyright notice,
-      this list of conditions and the following disclaimers.
-
-    * Redistributions in binary form must reproduce the above copyright notice,
-      this list of conditions and the following disclaimers in the
-      documentation and/or other materials provided with the distribution.
-
-    * Neither the names of the LLVM Team, University of Illinois at
-      Urbana-Champaign, nor the names of its contributors may be used to
-      endorse or promote products derived from this Software without specific
-      prior written permission.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
-CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
-SOFTWARE.
-
-==============================================================================
-Copyrights and Licenses for Third Party Software Distributed with LLVM:
-==============================================================================
-The LLVM software contains code written by third parties.  Such software will
-have its own individual LICENSE.TXT file in the directory in which it appears.
-This file will describe the copyrights, license, and restrictions which apply
-to that code.
-
-The disclaimer of warranty in the University of Illinois Open Source License
-applies to all code in the LLVM Distribution, and nothing in any of the
-other licenses gives permission to use the names of the LLVM Team or the
-University of Illinois to endorse or promote products derived from this
-Software.
-
-The following pieces of software have additional or alternate copyrights,
-licenses, and/or restrictions:
-
-Program             Directory
--------             ---------
-Google Test         llvm/utils/unittest/googletest
-OpenBSD regex       llvm/lib/Support/{reg*, COPYRIGHT.regex}
-pyyaml tests        llvm/test/YAMLParser/{*.data, LICENSE.TXT}
-ARM contributions   llvm/lib/Target/ARM/LICENSE.TXT
-md5 contributions   llvm/lib/Support/MD5.cpp llvm/include/llvm/Support/MD5.h
-
---------------------------------------------------------------------------------
-
-3rdparty dependency gRPC is statically linked in certain binary
-distributions, like the python wheels. gRPC has the following license:
-
-Copyright 2014 gRPC authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency Apache Thrift is statically linked in certain binary
-distributions, like the python wheels. Apache Thrift has the following license:
-
-Apache Thrift
-Copyright (C) 2006 - 2019, The Apache Software Foundation
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency Apache ORC is statically linked in certain binary
-distributions, like the python wheels. Apache ORC has the following license:
-
-Apache ORC
-Copyright 2013-2019 The Apache Software Foundation
-
-This product includes software developed by The Apache Software
-Foundation (http://www.apache.org/).
-
-This product includes software developed by Hewlett-Packard:
-(c) Copyright [2014-2015] Hewlett-Packard Development Company, L.P
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency zstd is statically linked in certain binary
-distributions, like the python wheels. ZSTD has the following license:
-
-BSD License
-
-For Zstandard software
-
-Copyright (c) 2016-present, Facebook, Inc. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
- * Neither the name Facebook nor the names of its contributors may be used to
-   endorse or promote products derived from this software without specific
-   prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency lz4 is statically linked in certain binary
-distributions, like the python wheels. lz4 has the following license:
-
-LZ4 Library
-Copyright (c) 2011-2016, Yann Collet
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
-  list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice, this
-  list of conditions and the following disclaimer in the documentation and/or
-  other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency Brotli is statically linked in certain binary
-distributions, like the python wheels. Brotli has the following license:
-
-Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency rapidjson is statically linked in certain binary
-distributions, like the python wheels. rapidjson and its dependencies have the
-following licenses:
-
-Tencent is pleased to support the open source community by making RapidJSON
-available.
-
-Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
-All rights reserved.
-
-If you have downloaded a copy of the RapidJSON binary from Tencent, please note
-that the RapidJSON binary is licensed under the MIT License.
-If you have downloaded a copy of the RapidJSON source code from Tencent, please
-note that RapidJSON source code is licensed under the MIT License, except for
-the third-party components listed below which are subject to different license
-terms.  Your integration of RapidJSON into your own projects may require
-compliance with the MIT License, as well as the other licenses applicable to
-the third-party components included within RapidJSON. To avoid the problematic
-JSON license in your own projects, it's sufficient to exclude the
-bin/jsonchecker/ directory, as it's the only code under the JSON license.
-A copy of the MIT License is included in this file.
-
-Other dependencies and licenses:
-
-    Open Source Software Licensed Under the BSD License:
-    --------------------------------------------------------------------
-
-    The msinttypes r29
-    Copyright (c) 2006-2013 Alexander Chemeris
-    All rights reserved.
-
-    Redistribution and use in source and binary forms, with or without
-    modification, are permitted provided that the following conditions are met:
-
-    * Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
-    * Neither the name of the copyright holder nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-    THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
-    EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-    DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR
-    ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-    DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-    CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-    LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-    OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-    DAMAGE.
-
-    Open Source Software Licensed Under the JSON License:
-    --------------------------------------------------------------------
-
-    json.org
-    Copyright (c) 2002 JSON.org
-    All Rights Reserved.
-
-    JSON_checker
-    Copyright (c) 2002 JSON.org
-    All Rights Reserved.
-
-
-    Terms of the JSON License:
-    ---------------------------------------------------
-
-    Permission is hereby granted, free of charge, to any person obtaining a
-    copy of this software and associated documentation files (the "Software"),
-    to deal in the Software without restriction, including without limitation
-    the rights to use, copy, modify, merge, publish, distribute, sublicense,
-    and/or sell copies of the Software, and to permit persons to whom the
-    Software is furnished to do so, subject to the following conditions:
-
-    The above copyright notice and this permission notice shall be included in
-    all copies or substantial portions of the Software.
-
-    The Software shall be used for Good, not Evil.
-
-    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-    DEALINGS IN THE SOFTWARE.
-
-
-    Terms of the MIT License:
-    --------------------------------------------------------------------
-
-    Permission is hereby granted, free of charge, to any person obtaining a
-    copy of this software and associated documentation files (the "Software"),
-    to deal in the Software without restriction, including without limitation
-    the rights to use, copy, modify, merge, publish, distribute, sublicense,
-    and/or sell copies of the Software, and to permit persons to whom the
-    Software is furnished to do so, subject to the following conditions:
-
-    The above copyright notice and this permission notice shall be included
-    in all copies or substantial portions of the Software.
-
-    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-    DEALINGS IN THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency snappy is statically linked in certain binary
-distributions, like the python wheels. snappy has the following license:
-
-Copyright 2011, Google Inc.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright notice,
-      this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above copyright notice,
-      this list of conditions and the following disclaimer in the documentation
-      and/or other materials provided with the distribution.
-    * Neither the name of Google Inc. nor the names of its contributors may be
-      used to endorse or promote products derived from this software without
-      specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-===
-
-Some of the benchmark data in testdata/ is licensed differently:
-
- - fireworks.jpeg is Copyright 2013 Steinar H. Gunderson, and
-   is licensed under the Creative Commons Attribution 3.0 license
-   (CC-BY-3.0). See https://creativecommons.org/licenses/by/3.0/
-   for more information.
-
- - kppkn.gtb is taken from the Gaviota chess tablebase set, and
-   is licensed under the MIT License. See
-   https://sites.google.com/site/gaviotachessengine/Home/endgame-tablebases-1
-   for more information.
-
- - paper-100k.pdf is an excerpt (bytes 92160 to 194560) from the paper
-   “Combinatorial Modeling of Chromatin Features Quantitatively Predicts DNA
-   Replication Timing in _Drosophila_” by Federico Comoglio and Renato Paro,
-   which is licensed under the CC-BY license. See
-   http://www.ploscompbiol.org/static/license for more information.
-
- - alice29.txt, asyoulik.txt, plrabn12.txt and lcet10.txt are from Project
-   Gutenberg. The first three have expired copyrights and are in the public
-   domain; the latter does not have expired copyright, but is still in the
-   public domain according to the license information
-   (http://www.gutenberg.org/ebooks/53).
-
---------------------------------------------------------------------------------
-
-3rdparty dependency gflags is statically linked in certain binary
-distributions, like the python wheels. gflags has the following license:
-
-Copyright (c) 2006, Google Inc.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-    * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency glog is statically linked in certain binary
-distributions, like the python wheels. glog has the following license:
-
-Copyright (c) 2008, Google Inc.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-    * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
-A function gettimeofday in utilities.cc is based on
-
-http://www.google.com/codesearch/p?hl=en#dR3YEbitojA/COPYING&q=GetSystemTimeAsFileTime%20license:bsd
-
-The license of this code is:
-
-Copyright (c) 2003-2008, Jouni Malinen <j...@w1.fi> and contributors
-All Rights Reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-1. Redistributions of source code must retain the above copyright
-   notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-
-3. Neither the name(s) of the above-listed copyright holder(s) nor the
-   names of its contributors may be used to endorse or promote products
-   derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency re2 is statically linked in certain binary
-distributions, like the python wheels. re2 has the following license:
-
-Copyright (c) 2009 The RE2 Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-      copyright notice, this list of conditions and the following
-      disclaimer in the documentation and/or other materials provided
-      with the distribution.
-    * Neither the name of Google Inc. nor the names of its contributors
-      may be used to endorse or promote products derived from this
-      software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency c-ares is statically linked in certain binary
-distributions, like the python wheels. c-ares has the following license:
-
-# c-ares license
-
-Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS
-file.
-
-Copyright 1998 by the Massachusetts Institute of Technology.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted, provided that
-the above copyright notice appear in all copies and that both that copyright
-notice and this permission notice appear in supporting documentation, and that
-the name of M.I.T. not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior permission.
-M.I.T. makes no representations about the suitability of this software for any
-purpose.  It is provided "as is" without express or implied warranty.
-
---------------------------------------------------------------------------------
-
-3rdparty dependency zlib is redistributed as a dynamically linked shared
-library in certain binary distributions, like the python wheels. In the future
-this will likely change to static linkage. zlib has the following license:
-
-zlib.h -- interface of the 'zlib' general purpose compression library
-  version 1.2.11, January 15th, 2017
-
-  Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
-
-  This software is provided 'as-is', without any express or implied
-  warranty.  In no event will the authors be held liable for any damages
-  arising from the use of this software.
-
-  Permission is granted to anyone to use this software for any purpose,
-  including commercial applications, and to alter it and redistribute it
-  freely, subject to the following restrictions:
-
-  1. The origin of this software must not be misrepresented; you must not
-     claim that you wrote the original software. If you use this software
-     in a product, an acknowledgment in the product documentation would be
-     appreciated but is not required.
-  2. Altered source versions must be plainly marked as such, and must not be
-     misrepresented as being the original software.
-  3. This notice may not be removed or altered from any source distribution.
-
-  Jean-loup Gailly        Mark Adler
-  jloup@gzip.org          madler@alumni.caltech.edu
-
---------------------------------------------------------------------------------
-
-3rdparty dependency openssl is redistributed as a dynamically linked shared
-library in certain binary distributions, like the python wheels. openssl
-versions preceding 3 have the following license:
-
-  LICENSE ISSUES
-  ==============
-
-  The OpenSSL toolkit stays under a double license, i.e. both the conditions of
-  the OpenSSL License and the original SSLeay license apply to the toolkit.
-  See below for the actual license texts.
-
-  OpenSSL License
-  ---------------
-
-/* ====================================================================
- * Copyright (c) 1998-2019 The OpenSSL Project.  All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * 1. Redistributions of source code must retain the above copyright
- *    notice, this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer in
- *    the documentation and/or other materials provided with the
- *    distribution.
- *
- * 3. All advertising materials mentioning features or use of this
- *    software must display the following acknowledgment:
- *    "This product includes software developed by the OpenSSL Project
- *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
- *
- * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
- *    endorse or promote products derived from this software without
- *    prior written permission. For written permission, please contact
- *    openssl-core@openssl.org.
- *
- * 5. Products derived from this software may not be called "OpenSSL"
- *    nor may "OpenSSL" appear in their names without prior written
- *    permission of the OpenSSL Project.
- *
- * 6. Redistributions of any form whatsoever must retain the following
- *    acknowledgment:
- *    "This product includes software developed by the OpenSSL Project
- *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
- *
- * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
- * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
- * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
- * OF THE POSSIBILITY OF SUCH DAMAGE.
- * ====================================================================
- *
- * This product includes cryptographic software written by Eric Young
- * (eay@cryptsoft.com).  This product includes software written by Tim
- * Hudson (tjh@cryptsoft.com).
- *
- */
-
- Original SSLeay License
- -----------------------
-
-/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
- * All rights reserved.
- *
- * This package is an SSL implementation written
- * by Eric Young (eay@cryptsoft.com).
- * The implementation was written so as to conform with Netscapes SSL.
- *
- * This library is free for commercial and non-commercial use as long as
- * the following conditions are aheared to.  The following conditions
- * apply to all code found in this distribution, be it the RC4, RSA,
- * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
- * included with this distribution is covered by the same copyright terms
- * except that the holder is Tim Hudson (tjh@cryptsoft.com).
- *
- * Copyright remains Eric Young's, and as such any Copyright notices in
- * the code are not to be removed.
- * If this package is used in a product, Eric Young should be given attribution
- * as the author of the parts of the library used.
- * This can be in the form of a textual message at program startup or
- * in documentation (online or textual) provided with the package.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the copyright
- *    notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer in the
- *    documentation and/or other materials provided with the distribution.
- * 3. All advertising materials mentioning features or use of this software
- *    must display the following acknowledgement:
- *    "This product includes cryptographic software written by
- *     Eric Young (eay@cryptsoft.com)"
- *    The word 'cryptographic' can be left out if the rouines from the library
- *    being used are not cryptographic related :-).
- * 4. If you include any Windows specific code (or a derivative thereof) from
- *    the apps directory (application code) you must include an acknowledgement:
- *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
- *
- * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
- * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
- * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
- * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- *
- * The licence and distribution terms for any publically available version or
- * derivative of this code cannot be changed.  i.e. this code cannot simply be
- * copied and put under another distribution licence
- * [including the GNU Public Licence.]
- */
-
---------------------------------------------------------------------------------
-
-This project includes code from the rtools-backports project.
-
-* ci/scripts/PKGBUILD and ci/scripts/r_windows_build.sh are based on code
-  from the rtools-backports project.
-
-Copyright: Copyright (c) 2013 - 2019, Алексей and Jeroen Ooms.
-All rights reserved.
-Homepage: https://github.com/r-windows/rtools-backports
-License: 3-clause BSD
-
---------------------------------------------------------------------------------
-
-Some code from pandas has been adapted for the pyarrow codebase. pandas is
-available under the 3-clause BSD license, which follows:
-
-pandas license
-==============
-
-Copyright (c) 2011-2012, Lambda Foundry, Inc. and PyData Development Team
-All rights reserved.
-
-Copyright (c) 2008-2011 AQR Capital Management, LLC
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-       notice, this list of conditions and the following disclaimer.
-
-    * Redistributions in binary form must reproduce the above
-       copyright notice, this list of conditions and the following
-       disclaimer in the documentation and/or other materials provided
-       with the distribution.
-
-    * Neither the name of the copyright holder nor the names of any
-       contributors may be used to endorse or promote products derived
-       from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-Some bits from DyND, in particular aspects of the build system, have been
-adapted from libdynd and dynd-python under the terms of the BSD 2-clause
-license.
-
-The BSD 2-Clause License
-
-    Copyright (C) 2011-12, Dynamic NDArray Developers
-    All rights reserved.
-
-    Redistribution and use in source and binary forms, with or without
-    modification, are permitted provided that the following conditions are
-    met:
-
-        * Redistributions of source code must retain the above copyright
-           notice, this list of conditions and the following disclaimer.
-
-        * Redistributions in binary form must reproduce the above
-           copyright notice, this list of conditions and the following
-           disclaimer in the documentation and/or other materials provided
-           with the distribution.
-
-    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-Dynamic NDArray Developers list:
-
- * Mark Wiebe
- * Continuum Analytics
-
---------------------------------------------------------------------------------
-
-Some source code from Ibis (https://github.com/cloudera/ibis) has been adapted
-for PyArrow. Ibis is released under the Apache License, Version 2.0.
-
---------------------------------------------------------------------------------
-
-This project includes code from the autobrew project.
-
-* r/tools/autobrew and dev/tasks/homebrew-formulae/autobrew/apache-arrow.rb
-  are based on code from the autobrew project.
-
-Copyright (c) 2019, Jeroen Ooms
-License: MIT
-Homepage: https://github.com/jeroen/autobrew
-
---------------------------------------------------------------------------------
-
-dev/tasks/homebrew-formulae/apache-arrow.rb has the following license:
-
-BSD 2-Clause License
-
-Copyright (c) 2009-present, Homebrew contributors
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
-  list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice,
-  this list of conditions and the following disclaimer in the documentation
-  and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
---------------------------------------------------------------------------------
-
-cpp/src/arrow/vendored/base64.cpp has the following license:
-
-ZLIB License
-
-Copyright (C) 2004-2017 René Nyffenegger
-
-This source code is provided 'as-is', without any express or implied
-warranty. In no event will the author be held liable for any damages arising
-from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including
-commercial applications, and to alter it and redistribute it freely, subject to
-the following restrictions:
-
-1. The origin of this source code must not be misrepresented; you must not
-   claim that you wrote the original source code. If you use this source code
-   in a product, an acknowledgment in the product documentation would be
-   appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be
-   misrepresented as being the original source code.
-
-3. This notice may not be removed or altered from any source distribution.
-
-René Nyffenegger rene.nyffenegger@adp-gmbh.ch
-
---------------------------------------------------------------------------------
-
-The file cpp/src/arrow/vendored/optional.hpp has the following license:
-
-Boost Software License - Version 1.0 - August 17th, 2003
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
---------------------------------------------------------------------------------
-
-The file cpp/src/arrow/vendored/musl/strptime.c has the following license:
-
-Copyright © 2005-2020 Rich Felker, et al.
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/go/arrow/Makefile b/go/arrow/Makefile
deleted file mode 100644
index 9c4a232..0000000
--- a/go/arrow/Makefile
+++ /dev/null
@@ -1,54 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-GO_BUILD=go build
-GO_GEN=go generate
-GO_TEST?=go test
-GOPATH=$(realpath ../../../../../..)
-
-GO_SOURCES  := $(shell find . -path ./_lib -prune -o -name '*.go' -not -name '*_test.go')
-ALL_SOURCES := $(shell find . -path ./_lib -prune -o -name '*.go' -name '*.s' -not -name '*_test.go')
-SOURCES_NO_VENDOR := $(shell find . -path ./vendor -prune -o -name "*.go" -not -name '*_test.go' -print)
-
-.PHONY: test bench assembly generate
-
-assembly:
-	@$(MAKE) -C memory assembly
-	@$(MAKE) -C math assembly
-
-generate: bin/tmpl
-	bin/tmpl -i -data=numeric.tmpldata type_traits_numeric.gen.go.tmpl type_traits_numeric.gen_test.go.tmpl array/numeric.gen.go.tmpl array/numericbuilder.gen_test.go.tmpl  array/numericbuilder.gen.go.tmpl array/bufferbuilder_numeric.gen.go.tmpl
-	bin/tmpl -i -data=datatype_numeric.gen.go.tmpldata datatype_numeric.gen.go.tmpl
-	@$(MAKE) -C math generate
-
-fmt: $(SOURCES_NO_VENDOR)
-	goimports -w $^
-
-bench: $(GO_SOURCES) | assembly
-	$(GO_TEST) $(GO_TEST_ARGS) -bench=. -run=- ./...
-
-bench-noasm: $(GO_SOURCES)
-	$(GO_TEST) $(GO_TEST_ARGS) -tags='noasm' -bench=. -run=- ./...
-
-test: $(GO_SOURCES) | assembly
-	$(GO_TEST) $(GO_TEST_ARGS) ./...
-
-test-noasm: $(GO_SOURCES)
-	$(GO_TEST) $(GO_TEST_ARGS) -tags='noasm' ./...
-
-bin/tmpl: _tools/tmpl/main.go
-	$(GO_BUILD) -o $@ ./_tools/tmpl
-
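The bench-noasm and test-noasm targets above run the suite with -tags='noasm',
the usual Go pattern for pairing assembly kernels with a pure-Go fallback. A
minimal sketch of that gating, with illustrative package and function names
(not the removed sources):

    // sum_noasm.go — compiled only when building with -tags='noasm'; a
    // companion file tagged "// +build !noasm" declares the assembly kernel.

    // +build noasm

    package vecmath

    // SumFloat64 is the portable fallback for the assembly-backed sum.
    func SumFloat64(xs []float64) float64 {
        var s float64
        for _, x := range xs {
            s += x
        }
        return s
    }

`go test ./...` exercises the default assembly path; `go test -tags='noasm'
./...` exercises fallbacks like this one.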
diff --git a/go/arrow/_examples/helloworld/main.go b/go/arrow/_examples/helloworld/main.go
deleted file mode 100644
index ab21f6b..0000000
--- a/go/arrow/_examples/helloworld/main.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/math"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func main() {
-	fb := array.NewFloat64Builder(memory.DefaultAllocator)
-
-	fb.AppendValues([]float64{1, 3, 5, 7, 9, 11}, nil)
-
-	vec := fb.NewFloat64Array()
-	math.Float64.Sum(vec)
-}
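The example above allocates a builder and an array but, being a smoke test,
never releases them and discards the sum. A sketch of the same program with
the reference-counting contract observed and the result printed (the 36 is
just 1+3+5+7+9+11):

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/math"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        fb := array.NewFloat64Builder(memory.DefaultAllocator)
        defer fb.Release() // builders are reference counted, like arrays

        fb.AppendValues([]float64{1, 3, 5, 7, 9, 11}, nil)

        vec := fb.NewFloat64Array()
        defer vec.Release() // drop our reference once the sum is taken

        fmt.Println(math.Float64.Sum(vec)) // prints 36
    }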
diff --git a/go/arrow/_tools/tmpl/main.go b/go/arrow/_tools/tmpl/main.go
deleted file mode 100644
index 0cabef3..0000000
--- a/go/arrow/_tools/tmpl/main.go
+++ /dev/null
@@ -1,267 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"bytes"
-	"encoding/json"
-	"flag"
-	"fmt"
-	"go/format"
-	"io/ioutil"
-	"os"
-	"os/exec"
-	"path/filepath"
-	"strings"
-	"text/template"
-)
-
-const Ext = ".tmpl"
-
-type pathSpec struct {
-	in, out string
-}
-
-func (p *pathSpec) String() string { return p.in + " → " + p.out }
-func (p *pathSpec) IsGoFile() bool { return filepath.Ext(p.out) == ".go" }
-
-func parsePath(path string) (string, string) {
-	p := strings.IndexByte(path, '=')
-	if p == -1 {
-		if filepath.Ext(path) != Ext {
-			errExit("template file '%s' must have .tmpl extension", path)
-		}
-		return path, path[:len(path)-len(Ext)]
-	}
-
-	return path[:p], path[p+1:]
-}
-
-type data struct {
-	In interface{}
-	D  listValue
-}
-
-func errExit(format string, a ...interface{}) {
-	fmt.Fprintf(os.Stderr, format, a...)
-	fmt.Fprintln(os.Stderr)
-	os.Exit(1)
-}
-
-type listValue map[string]string
-
-func (l listValue) String() string {
-	res := make([]string, 0, len(l))
-	for k, v := range l {
-		res = append(res, fmt.Sprintf("%s=%s", k, v))
-	}
-	return strings.Join(res, ", ")
-}
-
-func (l listValue) Set(v string) error {
-	nv := strings.Split(v, "=")
-	if len(nv) != 2 {
-		return fmt.Errorf("expected NAME=VALUE, got %s", v)
-	}
-	l[nv[0]] = nv[1]
-	return nil
-}
-
-func main() {
-	var (
-		dataArg = flag.String("data", "", "input JSON data")
-		gi      = flag.Bool("i", false, "run goimports")
-		in      = &data{D: make(listValue)}
-	)
-
-	flag.Var(&in.D, "d", "-d NAME=VALUE")
-
-	flag.Parse()
-	if *dataArg == "" {
-		errExit("data option is required")
-	}
-
-	if *gi {
-		if _, err := exec.LookPath("goimports"); err != nil {
-			errExit("failed to find goimports: %s", err.Error())
-		}
-		formatter = formatSource
-	} else {
-		formatter = format.Source
-	}
-
-	paths := flag.Args()
-	if len(paths) == 0 {
-		errExit("no tmpl files specified")
-	}
-
-	specs := make([]pathSpec, len(paths))
-	for i, p := range paths {
-		in, out := parsePath(p)
-		specs[i] = pathSpec{in: in, out: out}
-	}
-
-	in.In = readData(*dataArg)
-	process(in, specs)
-}
-
-func mustReadAll(path string) []byte {
-	data, err := ioutil.ReadFile(path)
-	if err != nil {
-		errExit(err.Error())
-	}
-
-	return data
-}
-
-func readData(path string) interface{} {
-	data := mustReadAll(path)
-	var v interface{}
-	if err := json.Unmarshal(StripComments(data), &v); err != nil {
-		errExit("invalid JSON data: %s", err.Error())
-	}
-	return v
-}
-
-func fileMode(path string) os.FileMode {
-	stat, err := os.Stat(path)
-	if err != nil {
-		errExit(err.Error())
-	}
-	return stat.Mode()
-}
-
-var funcs = template.FuncMap{
-	"lower": strings.ToLower,
-	"upper": strings.ToUpper,
-}
-
-func process(data interface{}, specs []pathSpec) {
-	for _, spec := range specs {
-		var (
-			t   *template.Template
-			err error
-		)
-		t, err = template.New("gen").Funcs(funcs).Parse(string(mustReadAll(spec.in)))
-		if err != nil {
-			errExit("error processing template '%s': %s", spec.in, err.Error())
-		}
-
-		var buf bytes.Buffer
-		if spec.IsGoFile() {
-			// preamble
-			fmt.Fprintf(&buf, "// Code generated by %s. DO NOT EDIT.\n", spec.in)
-			fmt.Fprintln(&buf)
-		}
-		err = t.Execute(&buf, data)
-		if err != nil {
-			errExit("error executing template '%s': %s", spec.in, err.Error())
-		}
-
-		generated := buf.Bytes()
-		if spec.IsGoFile() {
-			generated, err = formatter(generated)
-			if err != nil {
-				errExit("error formatting '%s': %s", spec.in, err.Error())
-			}
-		}
-
-		if err := ioutil.WriteFile(spec.out, generated, fileMode(spec.in)); err != nil {
-			errExit("error writing '%s': %s", spec.out, err.Error())
-		}
-	}
-}
-
-var (
-	formatter func([]byte) ([]byte, error)
-)
-
-func formatSource(in []byte) ([]byte, error) {
-	r := bytes.NewReader(in)
-	cmd := exec.Command("goimports")
-	cmd.Stdin = r
-	out, err := cmd.Output()
-	if err != nil {
-		if ee, ok := err.(*exec.ExitError); ok {
-			return nil, fmt.Errorf("error running goimports: %s", string(ee.Stderr))
-		}
-		return nil, fmt.Errorf("error running goimports: %s", string(out))
-	}
-
-	return out, nil
-}
-
-func StripComments(raw []byte) []byte {
-	var (
-		quoted, esc bool
-		comment     bool
-	)
-
-	buf := bytes.Buffer{}
-
-	for i := 0; i < len(raw); i++ {
-		b := raw[i]
-
-		if comment {
-			switch b {
-			case '/':
-				comment = false
-				j := bytes.IndexByte(raw[i+1:], '\n')
-				if j == -1 {
-					i = len(raw)
-				} else {
-					i += j // keep new line
-				}
-			case '*':
-				j := bytes.Index(raw[i+1:], []byte("*/"))
-				if j == -1 {
-					i = len(raw)
-				} else {
-					i += j + 2
-					comment = false
-				}
-			}
-			continue
-		}
-
-		if esc {
-			esc = false
-			continue
-		}
-
-		if b == '\\' && quoted {
-			esc = true
-			continue
-		}
-
-		if b == '"' || b == '\'' {
-			quoted = !quoted
-		}
-
-		if b == '/' && !quoted {
-			comment = true
-			continue
-		}
-
-		buf.WriteByte(b)
-	}
-
-	if quoted || esc || comment {
-		// unexpected state, so return raw bytes
-		return raw
-	}
-
-	return buf.Bytes()
-}
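bin/tmpl, removed above, decodes a JSON data file (after StripComments),
executes each .tmpl through text/template with the lower/upper helpers, and
writes the result next to the template (or to the OUT half of an IN=OUT
argument). A stripped-down stand-in using only the standard library — the
data and template text here are illustrative, not the removed .tmpldata files:

    package main

    import (
        "os"
        "strings"
        "text/template"
    )

    func main() {
        // The same helper functions the removed tool registers.
        funcs := template.FuncMap{
            "lower": strings.ToLower,
            "upper": strings.ToUpper,
        }
        // Stand-in for the decoded -data JSON; the real tool exposes it as .In.
        data := []map[string]string{{"Name": "Int64"}, {"Name": "Float64"}}

        t := template.Must(template.New("gen").Funcs(funcs).Parse(
            "// {{range .}}{{lower .Name}} {{end}}\n"))
        if err := t.Execute(os.Stdout, data); err != nil {
            panic(err)
        }
    }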
diff --git a/go/arrow/_tools/tmpl/main_test.go b/go/arrow/_tools/tmpl/main_test.go
deleted file mode 100644
index 831cf79..0000000
--- a/go/arrow/_tools/tmpl/main_test.go
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"testing"
-)
-
-func TestStripComments(t *testing.T) {
-	tests := []struct {
-		name string
-		in   string
-		exp  string
-	}{
-		{name: "none", in: `[1,2,3]`, exp: `[1,2,3]`},
-		{name: "single-line, line comment at end", in: `[1,2,3] // foo bar`, exp: `[1,2,3] `},
-		{name: "single-line, block comment at end", in: `[1,2,3] /* foo bar */  `, exp: `[1,2,3]   `},
-		{name: "single-line, nested block comment markers at end", in: `[1,2,3] /* /* // */`, exp: `[1,2,3] `},
-		{name: "single-line, block comment in middle", in: `[1,/* foo bar */2,3]`, exp: `[1,2,3]`},
-		{name: "single-line, block comment in string", in: `[1,"/* foo bar */"]`, exp: `[1,"/* foo bar */"]`},
-		{name: "single-line, malformed block comment", in: `[1,2,/*]`, exp: `[1,2,/*]`},
-		{name: "single-line, malformed JSON", in: `[1,2,/]`, exp: `[1,2,/]`},
-
-		{
-			name: "multi-line",
-			in: `[
-  1,
-  2,
-  3
-]`,
-			exp: `[
-  1,
-  2,
-  3
-]`,
-		},
-		{
-			name: "multi-line, multiple line comments",
-			in: `[ // foo
-  1, // bar
-  2,
-  3
-] // fit`,
-			exp: `[ 
-  1, 
-  2,
-  3
-] `,
-		},
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			got := string(StripComments([]byte(test.in)))
-			if got != test.exp {
-				t.Errorf("got:\n%s\nexp:\n%s", got, test.exp)
-			}
-		})
-	}
-}
diff --git a/go/arrow/array/array.go b/go/arrow/array/array.go
deleted file mode 100644
index 9cbaef9..0000000
--- a/go/arrow/array/array.go
+++ /dev/null
@@ -1,208 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array // import "github.com/apache/arrow/go/arrow/array"
-
-import (
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-)
-
-// A type which satisfies array.Interface represents an immutable sequence of values.
-type Interface interface {
-	// DataType returns the type metadata for this instance.
-	DataType() arrow.DataType
-
-	// NullN returns the number of null values in the array.
-	NullN() int
-
-	// NullBitmapBytes returns a byte slice of the validity bitmap.
-	NullBitmapBytes() []byte
-
-	// IsNull returns true if value at index is null.
-	// NOTE: IsNull will panic if NullBitmapBytes is not empty and i is out of range (i < 0 or i ≥ Len).
-	IsNull(i int) bool
-
-	// IsValid returns true if value at index is not null.
-	// NOTE: IsValid will panic if NullBitmapBytes is not empty and i is out of range (i < 0 or i ≥ Len).
-	IsValid(i int) bool
-
-	Data() *Data
-
-	// Len returns the number of elements in the array.
-	Len() int
-
-	// Retain increases the reference count by 1.
-	// Retain may be called simultaneously from multiple goroutines.
-	Retain()
-
-	// Release decreases the reference count by 1.
-	// Release may be called simultaneously from multiple goroutines.
-	// When the reference count goes to zero, the memory is freed.
-	Release()
-}
-
-const (
-	// UnknownNullCount specifies that the null count (NullN) should be computed from the null bitmap buffer.
-	UnknownNullCount = -1
-)
-
-type array struct {
-	refCount        int64
-	data            *Data
-	nullBitmapBytes []byte
-}
-
-// Retain increases the reference count by 1.
-// Retain may be called simultaneously from multiple goroutines.
-func (a *array) Retain() {
-	atomic.AddInt64(&a.refCount, 1)
-}
-
-// Release decreases the reference count by 1.
-// Release may be called simultaneously from multiple goroutines.
-// When the reference count goes to zero, the memory is freed.
-func (a *array) Release() {
-	debug.Assert(atomic.LoadInt64(&a.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&a.refCount, -1) == 0 {
-		a.data.Release()
-		a.data, a.nullBitmapBytes = nil, nil
-	}
-}
-
-// DataType returns the type metadata for this instance.
-func (a *array) DataType() arrow.DataType { return a.data.dtype }
-
-// NullN returns the number of null values in the array.
-func (a *array) NullN() int {
-	if a.data.nulls < 0 {
-		a.data.nulls = a.data.length - bitutil.CountSetBits(a.nullBitmapBytes, a.data.offset, a.data.length)
-	}
-	return a.data.nulls
-}
-
-// NullBitmapBytes returns a byte slice of the validity bitmap.
-func (a *array) NullBitmapBytes() []byte { return a.nullBitmapBytes }
-
-func (a *array) Data() *Data { return a.data }
-
-// Len returns the number of elements in the array.
-func (a *array) Len() int { return a.data.length }
-
-// IsNull returns true if value at index is null.
-// NOTE: IsNull will panic if NullBitmapBytes is not empty and i is out of range (i < 0 or i ≥ Len).
-func (a *array) IsNull(i int) bool {
-	return len(a.nullBitmapBytes) != 0 && bitutil.BitIsNotSet(a.nullBitmapBytes, a.data.offset+i)
-}
-
-// IsValid returns true if value at index is not null.
-// NOTE: IsValid will panic if NullBitmapBytes is not empty and i is out of range (i < 0 or i ≥ Len).
-func (a *array) IsValid(i int) bool {
-	return len(a.nullBitmapBytes) == 0 || bitutil.BitIsSet(a.nullBitmapBytes, a.data.offset+i)
-}
-
-func (a *array) setData(data *Data) {
-	// Retain before releasing in case a.data is the same as data.
-	data.Retain()
-
-	if a.data != nil {
-		a.data.Release()
-	}
-
-	if len(data.buffers) > 0 && data.buffers[0] != nil {
-		a.nullBitmapBytes = data.buffers[0].Bytes()
-	}
-	a.data = data
-}
-
-func (a *array) Offset() int {
-	return a.data.Offset()
-}
-
-type arrayConstructorFn func(*Data) Interface
-
-var (
-	makeArrayFn [32]arrayConstructorFn
-)
-
-func unsupportedArrayType(data *Data) Interface {
-	panic("unsupported data type: " + data.dtype.ID().String())
-}
-
-func invalidDataType(data *Data) Interface {
-	panic("invalid data type: " + data.dtype.ID().String())
-}
-
-// MakeFromData constructs a strongly-typed array instance from generic Data.
-func MakeFromData(data *Data) Interface {
-	return makeArrayFn[byte(data.dtype.ID()&0x1f)](data)
-}
-
-// NewSlice constructs a zero-copy slice of the array with the indicated
-// indices i and j, corresponding to array[i:j].
-// The returned array must be Release()'d after use.
-//
-// NewSlice panics if the slice is outside the valid range of the input array.
-// NewSlice panics if j < i.
-func NewSlice(arr Interface, i, j int64) Interface {
-	data := NewSliceData(arr.Data(), i, j)
-	slice := MakeFromData(data)
-	data.Release()
-	return slice
-}
-
-func init() {
-	makeArrayFn = [...]arrayConstructorFn{
-		arrow.NULL:              func(data *Data) Interface { return NewNullData(data) },
-		arrow.BOOL:              func(data *Data) Interface { return NewBooleanData(data) },
-		arrow.UINT8:             func(data *Data) Interface { return NewUint8Data(data) },
-		arrow.INT8:              func(data *Data) Interface { return NewInt8Data(data) },
-		arrow.UINT16:            func(data *Data) Interface { return NewUint16Data(data) },
-		arrow.INT16:             func(data *Data) Interface { return NewInt16Data(data) },
-		arrow.UINT32:            func(data *Data) Interface { return NewUint32Data(data) },
-		arrow.INT32:             func(data *Data) Interface { return NewInt32Data(data) },
-		arrow.UINT64:            func(data *Data) Interface { return NewUint64Data(data) },
-		arrow.INT64:             func(data *Data) Interface { return NewInt64Data(data) },
-		arrow.FLOAT16:           func(data *Data) Interface { return NewFloat16Data(data) },
-		arrow.FLOAT32:           func(data *Data) Interface { return NewFloat32Data(data) },
-		arrow.FLOAT64:           func(data *Data) Interface { return NewFloat64Data(data) },
-		arrow.STRING:            func(data *Data) Interface { return NewStringData(data) },
-		arrow.BINARY:            func(data *Data) Interface { return NewBinaryData(data) },
-		arrow.FIXED_SIZE_BINARY: func(data *Data) Interface { return NewFixedSizeBinaryData(data) },
-		arrow.DATE32:            func(data *Data) Interface { return NewDate32Data(data) },
-		arrow.DATE64:            func(data *Data) Interface { return NewDate64Data(data) },
-		arrow.TIMESTAMP:         func(data *Data) Interface { return NewTimestampData(data) },
-		arrow.TIME32:            func(data *Data) Interface { return NewTime32Data(data) },
-		arrow.TIME64:            func(data *Data) Interface { return NewTime64Data(data) },
-		arrow.INTERVAL:          func(data *Data) Interface { return NewIntervalData(data) },
-		arrow.DECIMAL:           func(data *Data) Interface { return NewDecimal128Data(data) },
-		arrow.LIST:              func(data *Data) Interface { return NewListData(data) },
-		arrow.STRUCT:            func(data *Data) Interface { return NewStructData(data) },
-		arrow.UNION:             unsupportedArrayType,
-		arrow.DICTIONARY:        unsupportedArrayType,
-		arrow.MAP:               unsupportedArrayType,
-		arrow.EXTENSION:         unsupportedArrayType,
-		arrow.FIXED_SIZE_LIST:   func(data *Data) Interface { return NewFixedSizeListData(data) },
-		arrow.DURATION:          func(data *Data) Interface { return NewDurationData(data) },
-
-		// invalid data type to fill the last slot (index 31 = 2⁵-1) of the lookup table
-		31: invalidDataType,
-	}
-}
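array.go above fixes the ownership rules for every array: constructors return
with one reference held, Retain/Release adjust the count, memory is freed
when it reaches zero, and NewSlice is a zero-copy view that holds its own
reference to the shared Data. A small sketch of a caller observing that
contract (expected output noted in the comments):

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        b := array.NewInt64Builder(memory.DefaultAllocator)
        defer b.Release()

        b.AppendValues([]int64{10, 20, 30, 40}, nil)

        arr := b.NewInt64Array()
        defer arr.Release()

        // Zero-copy view of arr[1:3]; it keeps the shared buffers alive
        // until its own Release, even if arr is released first.
        slice := array.NewSlice(arr, 1, 3).(*array.Int64)
        defer slice.Release()

        fmt.Println(slice.Len(), slice.Value(0), slice.Value(1)) // 2 20 30
    }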
diff --git a/go/arrow/array/array_test.go b/go/arrow/array/array_test.go
deleted file mode 100644
index 48c2386..0000000
--- a/go/arrow/array/array_test.go
+++ /dev/null
@@ -1,301 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/internal/testing/tools"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-type testDataType struct {
-	id arrow.Type
-}
-
-func (d *testDataType) ID() arrow.Type { return d.id }
-func (d *testDataType) Name() string   { panic("implement me") }
-func (d *testDataType) BitWidth() int  { return 8 }
-
-func TestMakeFromData(t *testing.T) {
-	tests := []struct {
-		name     string
-		d        arrow.DataType
-		size     int
-		child    []*array.Data
-		expPanic bool
-		expError string
-	}{
-		// supported types
-		{name: "null", d: &testDataType{arrow.NULL}},
-		{name: "bool", d: &testDataType{arrow.BOOL}},
-		{name: "uint8", d: &testDataType{arrow.UINT8}},
-		{name: "uint16", d: &testDataType{arrow.UINT16}},
-		{name: "uint32", d: &testDataType{arrow.UINT32}},
-		{name: "uint64", d: &testDataType{arrow.UINT64}},
-		{name: "int8", d: &testDataType{arrow.INT8}},
-		{name: "int16", d: &testDataType{arrow.INT16}},
-		{name: "int32", d: &testDataType{arrow.INT32}},
-		{name: "int64", d: &testDataType{arrow.INT64}},
-		{name: "float32", d: &testDataType{arrow.FLOAT32}},
-		{name: "float64", d: &testDataType{arrow.FLOAT64}},
-		{name: "string", d: &testDataType{arrow.STRING}, size: 3},
-		{name: "binary", d: &testDataType{arrow.BINARY}, size: 3},
-		{name: "fixed_size_binary", d: &testDataType{arrow.FIXED_SIZE_BINARY}},
-		{name: "date32", d: &testDataType{arrow.DATE32}},
-		{name: "date64", d: &testDataType{arrow.DATE64}},
-		{name: "timestamp", d: &testDataType{arrow.TIMESTAMP}},
-		{name: "time32", d: &testDataType{arrow.TIME32}},
-		{name: "time64", d: &testDataType{arrow.TIME64}},
-		{name: "month_interval", d: arrow.FixedWidthTypes.MonthInterval},
-		{name: "day_time_interval", d: arrow.FixedWidthTypes.DayTimeInterval},
-		{name: "decimal", d: &testDataType{arrow.DECIMAL}},
-
-		{name: "list", d: &testDataType{arrow.LIST}, child: []*array.Data{
-			array.NewData(&testDataType{arrow.INT64}, 0, make([]*memory.Buffer, 4), nil, 0, 0),
-			array.NewData(&testDataType{arrow.INT64}, 0, make([]*memory.Buffer, 4), nil, 0, 0),
-		}},
-
-		{name: "struct", d: &testDataType{arrow.STRUCT}},
-		{name: "struct", d: &testDataType{arrow.STRUCT}, child: []*array.Data{
-			array.NewData(&testDataType{arrow.INT64}, 0, make([]*memory.Buffer, 4), nil, 0, 0),
-			array.NewData(&testDataType{arrow.INT64}, 0, make([]*memory.Buffer, 4), nil, 0, 0),
-		}},
-
-		{name: "fixed_size_list", d: arrow.FixedSizeListOf(4, arrow.PrimitiveTypes.Int64), child: []*array.Data{
-			array.NewData(&testDataType{arrow.INT64}, 0, make([]*memory.Buffer, 4), nil, 0, 0),
-			array.NewData(&testDataType{arrow.INT64}, 0, make([]*memory.Buffer, 4), nil, 0, 0),
-		}},
-		{name: "duration", d: &testDataType{arrow.DURATION}},
-
-		// unsupported types
-		{name: "union", d: &testDataType{arrow.UNION}, expPanic: true, expError: "unsupported data type: UNION"},
-		{name: "dictionary", d: &testDataType{arrow.DICTIONARY}, expPanic: true, expError: "unsupported data type: DICTIONARY"},
-		{name: "map", d: &testDataType{arrow.Type(27)}, expPanic: true, expError: "unsupported data type: MAP"},
-		{name: "extension", d: &testDataType{arrow.Type(28)}, expPanic: true, expError: "unsupported data type: EXTENSION"},
-
-		// invalid types
-		{name: "invalid(-1)", d: &testDataType{arrow.Type(-1)}, expPanic: true, expError: "invalid data type: Type(-1)"},
-		{name: "invalid(31)", d: &testDataType{arrow.Type(31)}, expPanic: true, expError: "invalid data type: Type(31)"},
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			var b [4]*memory.Buffer
-			var n = 4
-			if test.size != 0 {
-				n = test.size
-			}
-			data := array.NewData(test.d, 0, b[:n], test.child, 0, 0)
-
-			if test.expPanic {
-				assert.PanicsWithValue(t, test.expError, func() {
-					array.MakeFromData(data)
-				})
-			} else {
-				assert.NotNil(t, array.MakeFromData(data))
-			}
-		})
-	}
-}
-
-func bbits(v ...int32) []byte {
-	return tools.IntsToBitsLSB(v...)
-}
-
-func TestArray_NullN(t *testing.T) {
-	tests := []struct {
-		name string
-		l    int
-		bm   []byte
-		n    int
-		exp  int
-	}{
-		{name: "unknown,l16", l: 16, bm: bbits(0x11001010, 0x00110011), n: array.UnknownNullCount, exp: 8},
-		{name: "unknown,l12,ignores last nibble", l: 12, bm: bbits(0x11001010, 0x00111111), n: array.UnknownNullCount, exp: 6},
-		{name: "unknown,l12,12 nulls", l: 12, bm: bbits(0x00000000, 0x00000000), n: array.UnknownNullCount, exp: 12},
-		{name: "unknown,l12,00 nulls", l: 12, bm: bbits(0x11111111, 0x11111111), n: array.UnknownNullCount, exp: 0},
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			buf := memory.NewBufferBytes(test.bm)
-			data := array.NewData(arrow.FixedWidthTypes.Boolean, test.l, []*memory.Buffer{buf, nil}, nil, test.n, 0)
-			buf.Release()
-			ar := array.MakeFromData(data)
-			data.Release()
-			got := ar.NullN()
-			ar.Release()
-			assert.Equal(t, test.exp, got)
-		})
-	}
-}
-
-func TestArraySlice(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	var (
-		valids = []bool{true, true, true, false, true, true}
-		vs     = []float64{1, 2, 3, 0, 4, 5}
-	)
-
-	b := array.NewFloat64Builder(pool)
-	defer b.Release()
-
-	for _, tc := range []struct {
-		i, j   int
-		panics bool
-		len    int
-	}{
-		{i: 0, j: len(valids), panics: false, len: len(valids)},
-		{i: len(valids), j: len(valids), panics: false, len: 0},
-		{i: 0, j: 1, panics: false, len: 1},
-		{i: 1, j: 1, panics: false, len: 0},
-		{i: 0, j: len(valids) + 1, panics: true},
-		{i: 2, j: 1, panics: true},
-		{i: len(valids) + 1, j: len(valids) + 1, panics: true},
-	} {
-		t.Run("", func(t *testing.T) {
-			b.AppendValues(vs, valids)
-
-			arr := b.NewFloat64Array()
-			defer arr.Release()
-
-			if got, want := arr.Len(), len(valids); got != want {
-				t.Fatalf("got=%d, want=%d", got, want)
-			}
-
-			if tc.panics {
-				defer func() {
-					e := recover()
-					if e == nil {
-						t.Fatalf("this should have panicked, but did not")
-					}
-				}()
-			}
-
-			slice := array.NewSlice(arr, int64(tc.i), int64(tc.j)).(*array.Float64)
-			defer slice.Release()
-
-			if got, want := slice.Len(), tc.len; got != want {
-				t.Fatalf("invalid slice length: got=%d, want=%d", got, want)
-			}
-		})
-	}
-}
-
-func TestArraySliceTypes(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	valids := []bool{true, true, true, false, true, true}
-
-	for _, tc := range []struct {
-		values  interface{}
-		builder array.Builder
-		append  func(b array.Builder, vs interface{})
-	}{
-		{
-			values:  []bool{true, false, true, false, true, false},
-			builder: array.NewBooleanBuilder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.BooleanBuilder).AppendValues(vs.([]bool), valids) },
-		},
-		{
-			values:  []uint8{1, 2, 3, 0, 4, 5},
-			builder: array.NewUint8Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Uint8Builder).AppendValues(vs.([]uint8), valids) },
-		},
-		{
-			values:  []uint16{1, 2, 3, 0, 4, 5},
-			builder: array.NewUint16Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Uint16Builder).AppendValues(vs.([]uint16), valids) },
-		},
-		{
-			values:  []uint32{1, 2, 3, 0, 4, 5},
-			builder: array.NewUint32Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Uint32Builder).AppendValues(vs.([]uint32), valids) },
-		},
-		{
-			values:  []uint64{1, 2, 3, 0, 4, 5},
-			builder: array.NewUint64Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Uint64Builder).AppendValues(vs.([]uint64), valids) },
-		},
-		{
-			values:  []int8{1, 2, 3, 0, 4, 5},
-			builder: array.NewInt8Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Int8Builder).AppendValues(vs.([]int8), valids) },
-		},
-		{
-			values:  []int16{1, 2, 3, 0, 4, 5},
-			builder: array.NewInt16Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Int16Builder).AppendValues(vs.([]int16), valids) },
-		},
-		{
-			values:  []int32{1, 2, 3, 0, 4, 5},
-			builder: array.NewInt32Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Int32Builder).AppendValues(vs.([]int32), valids) },
-		},
-		{
-			values:  []int64{1, 2, 3, 0, 4, 5},
-			builder: array.NewInt64Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Int64Builder).AppendValues(vs.([]int64), valids) },
-		},
-		{
-			values:  []float32{1, 2, 3, 0, 4, 5},
-			builder: array.NewFloat32Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Float32Builder).AppendValues(vs.([]float32), valids) },
-		},
-		{
-			values:  []float64{1, 2, 3, 0, 4, 5},
-			builder: array.NewFloat64Builder(pool),
-			append:  func(b array.Builder, vs interface{}) { b.(*array.Float64Builder).AppendValues(vs.([]float64), valids) },
-		},
-	} {
-		t.Run("", func(t *testing.T) {
-			defer tc.builder.Release()
-
-			b := tc.builder
-			tc.append(b, tc.values)
-
-			arr := b.NewArray()
-			defer arr.Release()
-
-			if got, want := arr.Len(), len(valids); got != want {
-				t.Fatalf("invalid length: got=%d, want=%d", got, want)
-			}
-
-			slice := array.NewSlice(arr, 2, 5)
-			defer slice.Release()
-
-			if got, want := slice.Len(), 3; got != want {
-				t.Fatalf("invalid slice length: got=%d, want=%d", got, want)
-			}
-
-			shortSlice := array.NewSlice(arr, 2, 3)
-			defer shortSlice.Release()
-
-			sliceOfShortSlice := array.NewSlice(shortSlice, 0, 1)
-			defer sliceOfShortSlice.Release()
-
-			if got, want := sliceOfShortSlice.Len(), 1; got != want {
-				t.Fatalf("invalid short slice length: got=%d, want=%d", got, want)
-			}
-		})
-	}
-}
diff --git a/go/arrow/array/binary.go b/go/arrow/array/binary.go
deleted file mode 100644
index ed58910..0000000
--- a/go/arrow/array/binary.go
+++ /dev/null
@@ -1,134 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"bytes"
-	"fmt"
-	"strings"
-	"unsafe"
-
-	"github.com/apache/arrow/go/arrow"
-)
-
-// A type which represents an immutable sequence of variable-length binary strings.
-type Binary struct {
-	array
-	valueOffsets []int32
-	valueBytes   []byte
-}
-
-// NewBinaryData constructs a new Binary array from data.
-func NewBinaryData(data *Data) *Binary {
-	a := &Binary{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-// Value returns the slice at index i. This value should not be mutated.
-func (a *Binary) Value(i int) []byte {
-	if i < 0 || i >= a.array.data.length {
-		panic("arrow/array: index out of range")
-	}
-	idx := a.array.data.offset + i
-	return a.valueBytes[a.valueOffsets[idx]:a.valueOffsets[idx+1]]
-}
-
-// ValueString returns the string at index i without performing additional allocations.
-// The string is only valid for the lifetime of the Binary array.
-func (a *Binary) ValueString(i int) string {
-	b := a.Value(i)
-	return *(*string)(unsafe.Pointer(&b))
-}
-
-func (a *Binary) ValueOffset(i int) int {
-	if i < 0 || i >= a.array.data.length {
-		panic("arrow/array: index out of range")
-	}
-	return int(a.valueOffsets[a.array.data.offset+i])
-}
-
-func (a *Binary) ValueLen(i int) int {
-	if i < 0 || i >= a.array.data.length {
-		panic("arrow/array: index out of range")
-	}
-	beg := a.array.data.offset + i
-	return int(a.valueOffsets[beg+1] - a.valueOffsets[beg])
-}
-
-func (a *Binary) ValueOffsets() []int32 {
-	beg := a.array.data.offset
-	end := beg + a.array.data.length + 1
-	return a.valueOffsets[beg:end]
-}
-
-func (a *Binary) ValueBytes() []byte {
-	beg := a.array.data.offset
-	end := beg + a.array.data.length
-	return a.valueBytes[a.valueOffsets[beg]:a.valueOffsets[end]]
-}
-
-func (a *Binary) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			o.WriteString(" ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%q", a.ValueString(i))
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *Binary) setData(data *Data) {
-	if len(data.buffers) != 3 {
-		panic("len(data.buffers) != 3")
-	}
-
-	a.array.setData(data)
-
-	if valueData := data.buffers[2]; valueData != nil {
-		a.valueBytes = valueData.Bytes()
-	}
-
-	if valueOffsets := data.buffers[1]; valueOffsets != nil {
-		a.valueOffsets = arrow.Int32Traits.CastFromBytes(valueOffsets.Bytes())
-	}
-}
-
-func arrayEqualBinary(left, right *Binary) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if !bytes.Equal(left.Value(i), right.Value(i)) {
-			return false
-		}
-	}
-	return true
-}
-
-var (
-	_ Interface = (*Binary)(nil)
-)
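Binary above keeps every value in one contiguous valueBytes buffer delimited
by n+1 valueOffsets, so Value(i) is the byte range
valueOffsets[i]:valueOffsets[i+1] (shifted by the array offset when sliced).
A short sketch that makes the layout visible — expected output in the
comments:

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow"
        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        b := array.NewBinaryBuilder(memory.DefaultAllocator, arrow.BinaryTypes.Binary)
        defer b.Release()

        b.AppendValues([][]byte{[]byte("ab"), []byte(""), []byte("cde")}, nil)

        arr := b.NewBinaryArray()
        defer arr.Release()

        fmt.Println(arr.ValueOffsets())       // [0 2 2 5]
        fmt.Println(string(arr.ValueBytes())) // abcde
        fmt.Println(arr.ValueLen(2))          // 3
    }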
diff --git a/go/arrow/array/binary_test.go b/go/arrow/array/binary_test.go
deleted file mode 100644
index a7bbd47..0000000
--- a/go/arrow/array/binary_test.go
+++ /dev/null
@@ -1,430 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"reflect"
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func TestBinary(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-
-	values := [][]byte{
-		[]byte("AAA"),
-		nil,
-		[]byte("BBBB"),
-	}
-	valid := []bool{true, false, true}
-	b.AppendValues(values, valid)
-
-	b.Retain()
-	b.Release()
-
-	a := b.NewBinaryArray()
-	assert.Equal(t, 3, a.Len())
-	assert.Equal(t, 1, a.NullN())
-	assert.Equal(t, []byte("AAA"), a.Value(0))
-	assert.Equal(t, []byte{}, a.Value(1))
-	assert.Equal(t, []byte("BBBB"), a.Value(2))
-	a.Release()
-
-	// Test builder reset and NewArray API.
-	b.AppendValues(values, valid)
-	a = b.NewArray().(*Binary)
-	assert.Equal(t, 3, a.Len())
-	assert.Equal(t, 1, a.NullN())
-	assert.Equal(t, []byte("AAA"), a.Value(0))
-	assert.Equal(t, []byte{}, a.Value(1))
-	assert.Equal(t, []byte("BBBB"), a.Value(2))
-	a.Release()
-
-	b.Release()
-}
-
-func TestBinarySliceData(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "def", "g", "hijk", "lm", "n", "opq", "rs", "tu"}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	for _, v := range values {
-		b.AppendString(v)
-	}
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	if got, want := arr.Len(), len(values); got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	vs := make([]string, arr.Len())
-
-	for i := range vs {
-		vs[i] = arr.ValueString(i)
-	}
-
-	if got, want := vs, values; !reflect.DeepEqual(got, want) {
-		t.Fatalf("got=%v, want=%v", got, want)
-	}
-
-	tests := []struct {
-		interval [2]int64
-		want     []string
-	}{
-		{
-			interval: [2]int64{0, 0},
-			want:     []string{},
-		},
-		{
-			interval: [2]int64{0, 5},
-			want:     []string{"a", "bc", "def", "g", "hijk"},
-		},
-		{
-			interval: [2]int64{0, 10},
-			want:     []string{"a", "bc", "def", "g", "hijk", "lm", "n", "opq", "rs", "tu"},
-		},
-		{
-			interval: [2]int64{5, 10},
-			want:     []string{"lm", "n", "opq", "rs", "tu"},
-		},
-		{
-			interval: [2]int64{10, 10},
-			want:     []string{},
-		},
-		{
-			interval: [2]int64{2, 7},
-			want:     []string{"def", "g", "hijk", "lm", "n"},
-		},
-	}
-
-	for _, tc := range tests {
-		t.Run("", func(t *testing.T) {
-
-			slice := NewSlice(arr, tc.interval[0], tc.interval[1]).(*Binary)
-			defer slice.Release()
-
-			if got, want := slice.Len(), len(tc.want); got != want {
-				t.Fatalf("got=%d, want=%d", got, want)
-			}
-
-			vs := make([]string, slice.Len())
-
-			for i := range vs {
-				vs[i] = slice.ValueString(i)
-			}
-
-			if got, want := vs, tc.want; !reflect.DeepEqual(got, want) {
-				t.Fatalf("got=%v, want=%v", got, want)
-			}
-		})
-	}
-}
-
-func TestBinarySliceDataWithNull(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "", "", "hijk", "lm", "", "opq", "", "tu"}
-	valids := []bool{true, true, false, false, true, true, true, true, false, true}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	b.AppendStringValues(values, valids)
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	if got, want := arr.Len(), len(values); got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	if got, want := arr.NullN(), 3; got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	vs := make([]string, arr.Len())
-
-	for i := range vs {
-		vs[i] = arr.ValueString(i)
-	}
-
-	if got, want := vs, values; !reflect.DeepEqual(got, want) {
-		t.Fatalf("got=%v, want=%v", got, want)
-	}
-
-	tests := []struct {
-		interval [2]int64
-		nulls    int
-		want     []string
-	}{
-		{
-			interval: [2]int64{0, 2},
-			nulls:    0,
-			want:     []string{"a", "bc"},
-		},
-		{
-			interval: [2]int64{0, 3},
-			nulls:    1,
-			want:     []string{"a", "bc", ""},
-		},
-		{
-			interval: [2]int64{0, 4},
-			nulls:    2,
-			want:     []string{"a", "bc", "", ""},
-		},
-		{
-			interval: [2]int64{4, 8},
-			nulls:    0,
-			want:     []string{"hijk", "lm", "", "opq"},
-		},
-		{
-			interval: [2]int64{2, 9},
-			nulls:    3,
-			want:     []string{"", "", "hijk", "lm", "", "opq", ""},
-		},
-	}
-
-	for _, tc := range tests {
-		t.Run("", func(t *testing.T) {
-
-			slice := NewSlice(arr, tc.interval[0], tc.interval[1]).(*Binary)
-			defer slice.Release()
-
-			if got, want := slice.Len(), len(tc.want); got != want {
-				t.Fatalf("got=%d, want=%d", got, want)
-			}
-
-			if got, want := slice.NullN(), tc.nulls; got != want {
-				t.Errorf("got=%d, want=%d", got, want)
-			}
-
-			vs := make([]string, slice.Len())
-
-			for i := range vs {
-				vs[i] = slice.ValueString(i)
-			}
-
-			if got, want := vs, tc.want; !reflect.DeepEqual(got, want) {
-				t.Fatalf("got=%v, want=%v", got, want)
-			}
-		})
-	}
-}
-
-func TestBinarySliceOutOfBounds(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "def", "g", "hijk", "lm", "n", "opq", "rs", "tu"}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	for _, v := range values {
-		b.AppendString(v)
-	}
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	slice := NewSlice(arr, 3, 8).(*Binary)
-	defer slice.Release()
-
-	tests := []struct {
-		index int
-		panic bool
-	}{
-		{
-			index: -1,
-			panic: true,
-		},
-		{
-			index: 5,
-			panic: true,
-		},
-		{
-			index: 0,
-			panic: false,
-		},
-		{
-			index: 4,
-			panic: false,
-		},
-	}
-
-	for _, tc := range tests {
-		t.Run("", func(t *testing.T) {
-
-			var val string
-
-			if tc.panic {
-				defer func() {
-					e := recover()
-					if e == nil {
-						t.Fatalf("this should have panicked, but did not; slice value %q", val)
-					}
-					if got, want := e.(string), "arrow/array: index out of range"; got != want {
-						t.Fatalf("invalid error. got=%q, want=%q", got, want)
-					}
-				}()
-			} else {
-				defer func() {
-					if e := recover(); e != nil {
-						t.Fatalf("unexpected panic: %v", e)
-					}
-				}()
-			}
-
-			val = slice.ValueString(tc.index)
-		})
-	}
-}
-
-func TestBinaryValueOffset(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "", "", "hijk", "lm", "", "opq", "", "tu"}
-	valids := []bool{true, true, false, false, true, true, true, true, false, true}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	b.AppendStringValues(values, valids)
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	slice := NewSlice(arr, 2, 9).(*Binary)
-	defer slice.Release()
-
-	offset := 3
-	vs := values[2:9]
-
-	for i, v := range vs {
-		assert.Equal(t, offset, slice.ValueOffset(i))
-		offset += len(v)
-	}
-}
-
-func TestBinaryValueLen(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "", "", "hijk", "lm", "", "opq", "", "tu"}
-	valids := []bool{true, true, false, false, true, true, true, true, false, true}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	b.AppendStringValues(values, valids)
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	slice := NewSlice(arr, 2, 9).(*Binary)
-	defer slice.Release()
-
-	vs := values[2:9]
-
-	for i, v := range vs {
-		assert.Equal(t, len(v), slice.ValueLen(i))
-	}
-}
-
-func TestBinaryValueOffsets(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "", "", "hijk", "lm", "", "opq", "", "tu"}
-	valids := []bool{true, true, false, false, true, true, true, true, false, true}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	b.AppendStringValues(values, valids)
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	assert.Equal(t, []int32{0, 1, 3, 3, 3, 7, 9, 9, 12, 12, 14}, arr.ValueOffsets())
-
-	slice := NewSlice(arr, 2, 9).(*Binary)
-	defer slice.Release()
-
-	assert.Equal(t, []int32{3, 3, 3, 7, 9, 9, 12, 12}, slice.ValueOffsets())
-}
-
-func TestBinaryValueBytes(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "", "", "hijk", "lm", "", "opq", "", "tu"}
-	valids := []bool{true, true, false, false, true, true, true, true, false, true}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	b.AppendStringValues(values, valids)
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	assert.Equal(t, []byte{'a', 'b', 'c', 'h', 'i', 'j', 'k', 'l', 'm', 'o', 'p', 'q', 't', 'u'}, arr.ValueBytes())
-
-	slice := NewSlice(arr, 2, 9).(*Binary)
-	defer slice.Release()
-
-	assert.Equal(t, []byte{'h', 'i', 'j', 'k', 'l', 'm', 'o', 'p', 'q'}, slice.ValueBytes())
-}
-
-func TestBinaryStringer(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	values := []string{"a", "bc", "", "é", "", "hijk", "lm", "", "opq", "", "tu"}
-	valids := []bool{true, true, false, true, false, true, true, true, true, false, true}
-
-	b := NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	defer b.Release()
-
-	b.AppendStringValues(values, valids)
-
-	arr := b.NewArray().(*Binary)
-	defer arr.Release()
-
-	got := arr.String()
-	want := `["a" "bc" (null) "é" (null) "hijk" "lm" "" "opq" (null) "tu"]`
-
-	if got != want {
-		t.Fatalf("invalid stringer:\ngot= %s\nwant=%s\n", got, want)
-	}
-}
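
As the slice tests above show, array.NewSlice shares the parent array's
buffers rather than copying them, which is why the offsets checked by the
tests are positions into the original data. A minimal sketch of that usage,
assuming the pre-removal import paths:

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()

	b := array.NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
	defer b.Release()
	b.AppendStringValues([]string{"a", "bc", "def"}, nil)

	arr := b.NewArray().(*array.Binary)
	defer arr.Release()

	// NewSlice shares the parent's buffers; no data is copied.
	slice := array.NewSlice(arr, 1, 3).(*array.Binary)
	defer slice.Release()

	fmt.Println(slice) // ["bc" "def"]
}
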
diff --git a/go/arrow/array/binarybuilder.go b/go/arrow/array/binarybuilder.go
deleted file mode 100644
index 17562fc..0000000
--- a/go/arrow/array/binarybuilder.go
+++ /dev/null
@@ -1,217 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"math"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-const (
-	binaryArrayMaximumCapacity = math.MaxInt32
-)
-
-// A BinaryBuilder is used to build a Binary array using the Append methods.
-type BinaryBuilder struct {
-	builder
-
-	dtype   arrow.BinaryDataType
-	offsets *int32BufferBuilder
-	values  *byteBufferBuilder
-}
-
-func NewBinaryBuilder(mem memory.Allocator, dtype arrow.BinaryDataType) *BinaryBuilder {
-	b := &BinaryBuilder{
-		builder: builder{refCount: 1, mem: mem},
-		dtype:   dtype,
-		offsets: newInt32BufferBuilder(mem),
-		values:  newByteBufferBuilder(mem),
-	}
-	return b
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-// Release may be called simultaneously from multiple goroutines.
-func (b *BinaryBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.offsets != nil {
-			b.offsets.Release()
-			b.offsets = nil
-		}
-		if b.values != nil {
-			b.values.Release()
-			b.values = nil
-		}
-	}
-}
-
-func (b *BinaryBuilder) Append(v []byte) {
-	b.Reserve(1)
-	b.appendNextOffset()
-	b.values.Append(v)
-	b.UnsafeAppendBoolToBitmap(true)
-}
-
-func (b *BinaryBuilder) AppendString(v string) {
-	b.Append([]byte(v))
-}
-
-func (b *BinaryBuilder) AppendNull() {
-	b.Reserve(1)
-	b.appendNextOffset()
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-// AppendValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *BinaryBuilder) AppendValues(v [][]byte, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	for _, vv := range v {
-		b.appendNextOffset()
-		b.values.Append(vv)
-	}
-
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-// AppendStringValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *BinaryBuilder) AppendStringValues(v []string, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	for _, vv := range v {
-		b.appendNextOffset()
-		b.values.Append([]byte(vv))
-	}
-
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *BinaryBuilder) Value(i int) []byte {
-	offsets := b.offsets.Values()
-	start := int(offsets[i])
-	var end int
-	if i == (b.length - 1) {
-		end = b.values.Len()
-	} else {
-		end = int(offsets[i+1])
-	}
-	return b.values.Bytes()[start:end]
-}
-
-func (b *BinaryBuilder) init(capacity int) {
-	b.builder.init(capacity)
-	b.offsets.resize((capacity + 1) * arrow.Int32SizeBytes)
-}
-
-// DataLen returns the number of bytes in the data array.
-func (b *BinaryBuilder) DataLen() int { return b.values.length }
-
-// DataCap returns the total number of bytes that can be stored
-// without allocating additional memory.
-func (b *BinaryBuilder) DataCap() int { return b.values.capacity }
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *BinaryBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// ReserveData ensures there is enough space for appending n bytes
-// by checking the capacity and resizing the data buffer if necessary.
-func (b *BinaryBuilder) ReserveData(n int) {
-	if b.values.capacity < b.values.length+n {
-		b.values.resize(b.values.Len() + n)
-	}
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *BinaryBuilder) Resize(n int) {
-	b.offsets.resize((n + 1) * arrow.Int32SizeBytes)
-	b.builder.resize(n, b.init)
-}
-
-// NewArray creates a Binary array from the memory buffers used by the builder and resets the BinaryBuilder
-// so it can be used to build a new array.
-func (b *BinaryBuilder) NewArray() Interface {
-	return b.NewBinaryArray()
-}
-
-// NewBinaryArray creates a Binary array from the memory buffers used by the builder and resets the BinaryBuilder
-// so it can be used to build a new array.
-func (b *BinaryBuilder) NewBinaryArray() (a *Binary) {
-	data := b.newData()
-	a = NewBinaryData(data)
-	data.Release()
-	return
-}
-
-func (b *BinaryBuilder) newData() (data *Data) {
-	b.appendNextOffset()
-	offsets, values := b.offsets.Finish(), b.values.Finish()
-	data = NewData(b.dtype, b.length, []*memory.Buffer{b.nullBitmap, offsets, values}, nil, b.nulls, 0)
-	if offsets != nil {
-		offsets.Release()
-	}
-
-	if values != nil {
-		values.Release()
-	}
-
-	b.builder.reset()
-
-	return
-}
-
-func (b *BinaryBuilder) appendNextOffset() {
-	numBytes := b.values.Len()
-	// TODO(sgc): check binaryArrayMaximumCapacity?
-	b.offsets.AppendValue(int32(numBytes))
-}
-
-var (
-	_ Builder = (*BinaryBuilder)(nil)
-)
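
For reference, a minimal sketch of the BinaryBuilder lifecycle defined
above, assuming the pre-removal import paths; the printed form follows the
Stringer format exercised in binary_test.go:

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()

	b := array.NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
	defer b.Release()

	b.AppendString("hello")
	b.AppendNull()
	b.Append([]byte("world"))

	// NewBinaryArray hands the buffers to the array and resets
	// the builder for reuse.
	arr := b.NewBinaryArray()
	defer arr.Release()

	fmt.Println(arr) // ["hello" (null) "world"]
}
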
diff --git a/go/arrow/array/binarybuilder_test.go b/go/arrow/array/binarybuilder_test.go
deleted file mode 100644
index 145435b..0000000
--- a/go/arrow/array/binarybuilder_test.go
+++ /dev/null
@@ -1,87 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"bytes"
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestBinaryBuilder(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-
-	exp := [][]byte{[]byte("foo"), []byte("bar"), nil, []byte("sydney"), []byte("cameron")}
-	for _, v := range exp {
-		if v == nil {
-			ab.AppendNull()
-		} else {
-			ab.Append(v)
-		}
-	}
-
-	assert.Equal(t, len(exp), ab.Len(), "unexpected Len()")
-	assert.Equal(t, 1, ab.NullN(), "unexpected NullN()")
-
-	for i, v := range exp {
-		if v == nil {
-			v = []byte{}
-		}
-		assert.Equal(t, v, ab.Value(i), "unexpected BinaryArrayBuilder.Value(%d)", i)
-	}
-
-	ar := ab.NewBinaryArray()
-	ab.Release()
-	ar.Release()
-
-	// check state of builder after NewBinaryArray
-	assert.Zero(t, ab.Len(), "unexpected ArrayBuilder.Len(), NewBinaryArray did not reset state")
-	assert.Zero(t, ab.Cap(), "unexpected ArrayBuilder.Cap(), NewBinaryArray did not reset state")
-	assert.Zero(t, ab.NullN(), "unexpected ArrayBuilder.NullN(), NewBinaryArray did not reset state")
-}
-
-func TestBinaryBuilder_ReserveData(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-
-	// call ReserveData and ensure the capacity doesn't change
-	// when appending entries until that count.
-	ab.ReserveData(256)
-	expCap := ab.DataCap()
-	for i := 0; i < 256/8; i++ {
-		ab.Append(bytes.Repeat([]byte("a"), 8))
-	}
-	assert.Equal(t, expCap, ab.DataCap(), "unexpected BinaryArrayBuilder.DataCap()")
-
-	ar := ab.NewBinaryArray()
-	ab.Release()
-	ar.Release()
-
-	// check state of builder after NewBinaryArray
-	assert.Zero(t, ab.Len(), "unexpected ArrayBuilder.Len(), NewBinaryArray did not reset state")
-	assert.Zero(t, ab.Cap(), "unexpected ArrayBuilder.Cap(), NewBinaryArray did not reset state")
-	assert.Zero(t, ab.NullN(), "unexpected ArrayBuilder.NullN(), NewBinaryArray did not reset state")
-}
diff --git a/go/arrow/array/boolean.go b/go/arrow/array/boolean.go
deleted file mode 100644
index e352e6e..0000000
--- a/go/arrow/array/boolean.go
+++ /dev/null
@@ -1,95 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"fmt"
-	"strings"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// Boolean represents an immutable sequence of boolean values.
-type Boolean struct {
-	array
-	values []byte
-}
-
-// NewBoolean creates a boolean array of length elements backed by the data memory.Buffer.
-// The nullBitmap buffer can be nil if there are no null values.
-// If nulls is not known, use UnknownNullCount to calculate the value of NullN at runtime from the nullBitmap buffer.
-func NewBoolean(length int, data *memory.Buffer, nullBitmap *memory.Buffer, nulls int) *Boolean {
-	return NewBooleanData(NewData(arrow.FixedWidthTypes.Boolean, length, []*memory.Buffer{nullBitmap, data}, nil, nulls, 0))
-}
-
-func NewBooleanData(data *Data) *Boolean {
-	a := &Boolean{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *Boolean) Value(i int) bool {
-	if i < 0 || i >= a.array.data.length {
-		panic("arrow/array: index out of range")
-	}
-	return bitutil.BitIsSet(a.values, a.array.data.offset+i)
-}
-
-func (a *Boolean) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			fmt.Fprintf(o, " ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%v", a.Value(i))
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *Boolean) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.values = vals.Bytes()
-	}
-}
-
-func arrayEqualBoolean(left, right *Boolean) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if left.Value(i) != right.Value(i) {
-			return false
-		}
-	}
-	return true
-}
-
-var (
-	_ Interface = (*Boolean)(nil)
-)
diff --git a/go/arrow/array/boolean_test.go b/go/arrow/array/boolean_test.go
deleted file mode 100644
index e2d5805..0000000
--- a/go/arrow/array/boolean_test.go
+++ /dev/null
@@ -1,288 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"fmt"
-	"reflect"
-	"strings"
-	"testing"
-
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func TestBooleanSliceData(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	values := []bool{true, false, true, true, true, true, true, false, true, false}
-
-	b := array.NewBooleanBuilder(pool)
-	defer b.Release()
-
-	for _, v := range values {
-		b.Append(v)
-	}
-
-	arr := b.NewArray().(*array.Boolean)
-	defer arr.Release()
-
-	if got, want := arr.Len(), len(values); got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	vs := make([]bool, arr.Len())
-
-	for i := range vs {
-		vs[i] = arr.Value(i)
-	}
-
-	if got, want := vs, values; !reflect.DeepEqual(got, want) {
-		t.Fatalf("got=%v, want=%v", got, want)
-	}
-
-	tests := []struct {
-		interval [2]int64
-		want     []bool
-	}{
-		{
-			interval: [2]int64{0, 0},
-			want:     []bool{},
-		},
-		{
-			interval: [2]int64{10, 10},
-			want:     []bool{},
-		},
-		{
-			interval: [2]int64{0, 5},
-			want:     []bool{true, false, true, true, true},
-		},
-		{
-			interval: [2]int64{5, 10},
-			want:     []bool{true, true, false, true, false},
-		},
-		{
-			interval: [2]int64{2, 7},
-			want:     []bool{true, true, true, true, true},
-		},
-	}
-
-	for _, tc := range tests {
-		t.Run("", func(t *testing.T) {
-
-			slice := array.NewSlice(arr, tc.interval[0], tc.interval[1]).(*array.Boolean)
-			defer slice.Release()
-
-			if got, want := slice.Len(), len(tc.want); got != want {
-				t.Fatalf("got=%d, want=%d", got, want)
-			}
-
-			vs := make([]bool, slice.Len())
-
-			for i := range vs {
-				vs[i] = slice.Value(i)
-			}
-
-			if got, want := vs, tc.want; !reflect.DeepEqual(got, want) {
-				t.Fatalf("got=%v, want=%v", got, want)
-			}
-		})
-	}
-}
-
-func TestBooleanSliceDataWithNull(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	values := []bool{true, false, true, false, false, false, true, false, true, false}
-	valids := []bool{true, false, true, true, true, true, true, false, true, true}
-
-	b := array.NewBooleanBuilder(pool)
-	defer b.Release()
-
-	b.AppendValues(values, valids)
-
-	arr := b.NewArray().(*array.Boolean)
-	defer arr.Release()
-
-	if got, want := arr.Len(), len(valids); got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	if got, want := arr.NullN(), 2; got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	vs := make([]bool, arr.Len())
-
-	for i := range vs {
-		vs[i] = arr.Value(i)
-	}
-
-	if got, want := vs, values; !reflect.DeepEqual(got, want) {
-		t.Fatalf("got=%v, want=%v", got, want)
-	}
-
-	tests := []struct {
-		interval [2]int64
-		nulls    int
-		want     []bool
-	}{
-		{
-			interval: [2]int64{2, 9},
-			nulls:    1,
-			want:     []bool{true, false, false, false, true, false, true},
-		},
-		{
-			interval: [2]int64{0, 7},
-			nulls:    1,
-			want:     []bool{true, false, true, false, false, false, true},
-		},
-		{
-			interval: [2]int64{1, 8},
-			nulls:    2,
-			want:     []bool{false, true, false, false, false, true, false},
-		},
-		{
-			interval: [2]int64{2, 7},
-			nulls:    0,
-			want:     []bool{true, false, false, false, true},
-		},
-	}
-
-	for _, tc := range tests {
-		t.Run("", func(t *testing.T) {
-
-			slice := array.NewSlice(arr, tc.interval[0], tc.interval[1]).(*array.Boolean)
-			defer slice.Release()
-
-			if got, want := slice.NullN(), tc.nulls; got != want {
-				t.Errorf("got=%d, want=%d", got, want)
-			}
-
-			if got, want := slice.Len(), len(tc.want); got != want {
-				t.Fatalf("got=%d, want=%d", got, want)
-			}
-
-			vs := make([]bool, slice.Len())
-
-			for i := range vs {
-				vs[i] = slice.Value(i)
-			}
-
-			if got, want := vs, tc.want; !reflect.DeepEqual(got, want) {
-				t.Fatalf("got=%v, want=%v", got, want)
-			}
-		})
-	}
-}
-
-func TestBooleanSliceOutOfBounds(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	values := []bool{true, false, true, false, true, false, true, false, true, false}
-
-	b := array.NewBooleanBuilder(pool)
-	defer b.Release()
-
-	for _, v := range values {
-		b.Append(v)
-	}
-
-	arr := b.NewArray().(*array.Boolean)
-	defer arr.Release()
-
-	slice := array.NewSlice(arr, 3, 8).(*array.Boolean)
-	defer slice.Release()
-
-	tests := []struct {
-		index int
-		panic bool
-	}{
-		{
-			index: -1,
-			panic: true,
-		},
-		{
-			index: 5,
-			panic: true,
-		},
-		{
-			index: 0,
-			panic: false,
-		},
-		{
-			index: 4,
-			panic: false,
-		},
-	}
-
-	for _, tc := range tests {
-		t.Run("", func(t *testing.T) {
-
-			var val bool
-
-			if tc.panic {
-				defer func() {
-					e := recover()
-					if e == nil {
-						t.Fatalf("this should have panicked, but did not; slice value %v", val)
-					}
-					if got, want := e.(string), "arrow/array: index out of range"; got != want {
-						t.Fatalf("invalid error. got=%q, want=%q", got, want)
-					}
-				}()
-			} else {
-				defer func() {
-					if e := recover(); e != nil {
-						t.Fatalf("unexpected panic: %v", e)
-					}
-				}()
-			}
-
-			val = slice.Value(tc.index)
-		})
-	}
-}
-
-func TestBooleanStringer(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	var (
-		values = []bool{true, false, true, false, true, false, true, false, true, false}
-		valids = []bool{true, true, false, true, true, true, false, true, true, true}
-	)
-
-	b := array.NewBooleanBuilder(pool)
-	defer b.Release()
-
-	b.AppendValues(values, valids)
-
-	arr := b.NewArray().(*array.Boolean)
-	defer arr.Release()
-
-	out := new(strings.Builder)
-	fmt.Fprintf(out, "%v", arr)
-
-	const want = "[true false (null) false true false (null) false true false]"
-	if got := out.String(); got != want {
-		t.Fatalf("invalid stringer:\ngot= %q\nwant=%q", got, want)
-	}
-}
diff --git a/go/arrow/array/booleanbuilder.go b/go/arrow/array/booleanbuilder.go
deleted file mode 100644
index 4a38156..0000000
--- a/go/arrow/array/booleanbuilder.go
+++ /dev/null
@@ -1,165 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-type BooleanBuilder struct {
-	builder
-
-	data    *memory.Buffer
-	rawData []byte
-}
-
-func NewBooleanBuilder(mem memory.Allocator) *BooleanBuilder {
-	return &BooleanBuilder{builder: builder{refCount: 1, mem: mem}}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-// Release may be called simultaneously from multiple goroutines.
-func (b *BooleanBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.data != nil {
-			b.data.Release()
-			b.data = nil
-			b.rawData = nil
-		}
-	}
-}
-
-func (b *BooleanBuilder) Append(v bool) {
-	b.Reserve(1)
-	b.UnsafeAppend(v)
-}
-
-func (b *BooleanBuilder) AppendByte(v byte) {
-	b.Reserve(1)
-	b.UnsafeAppend(v != 0)
-}
-
-func (b *BooleanBuilder) AppendNull() {
-	b.Reserve(1)
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-func (b *BooleanBuilder) UnsafeAppend(v bool) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	if v {
-		bitutil.SetBit(b.rawData, b.length)
-	} else {
-		bitutil.ClearBit(b.rawData, b.length)
-	}
-	b.length++
-}
-
-func (b *BooleanBuilder) AppendValues(v []bool, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	for i, vv := range v {
-		bitutil.SetBitTo(b.rawData, b.length+i, vv)
-	}
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *BooleanBuilder) init(capacity int) {
-	b.builder.init(capacity)
-
-	b.data = memory.NewResizableBuffer(b.mem)
-	bytesN := arrow.BooleanTraits.BytesRequired(capacity)
-	b.data.Resize(bytesN)
-	b.rawData = b.data.Bytes()
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *BooleanBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *BooleanBuilder) Resize(n int) {
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
-	if b.capacity == 0 {
-		b.init(n)
-	} else {
-		b.builder.resize(n, b.init)
-		b.data.Resize(arrow.BooleanTraits.BytesRequired(n))
-		b.rawData = b.data.Bytes()
-	}
-}
-
-// NewArray creates a Boolean array from the memory buffers used by the builder and resets the BooleanBuilder
-// so it can be used to build a new array.
-func (b *BooleanBuilder) NewArray() Interface {
-	return b.NewBooleanArray()
-}
-
-// NewBooleanArray creates a Boolean array from the memory buffers used by the builder and resets the BooleanBuilder
-// so it can be used to build a new array.
-func (b *BooleanBuilder) NewBooleanArray() (a *Boolean) {
-	data := b.newData()
-	a = NewBooleanData(data)
-	data.Release()
-	return
-}
-
-func (b *BooleanBuilder) newData() *Data {
-	bytesRequired := arrow.BooleanTraits.BytesRequired(b.length)
-	if bytesRequired > 0 && bytesRequired < b.data.Len() {
-		// trim buffers
-		b.data.Resize(bytesRequired)
-	}
-	res := NewData(arrow.FixedWidthTypes.Boolean, b.length, []*memory.Buffer{b.nullBitmap, b.data}, nil, b.nulls, 0)
-	b.reset()
-
-	if b.data != nil {
-		b.data.Release()
-		b.data = nil
-		b.rawData = nil
-	}
-
-	return res
-}
-
-var (
-	_ Builder = (*BooleanBuilder)(nil)
-)
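
Likewise, a short sketch of the BooleanBuilder API above; a nil valid
slice marks every appended value as non-null, per unsafeAppendBoolsToBitmap
in builder.go further down:

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()

	b := array.NewBooleanBuilder(mem)
	defer b.Release()

	b.AppendValues([]bool{true, false, true}, nil) // nil => all valid
	b.AppendNull()

	arr := b.NewBooleanArray()
	defer arr.Release()

	for i := 0; i < arr.Len(); i++ {
		if arr.IsNull(i) {
			fmt.Println("(null)")
			continue
		}
		fmt.Println(arr.Value(i))
	}
}
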
diff --git a/go/arrow/array/booleanbuilder_test.go b/go/arrow/array/booleanbuilder_test.go
deleted file mode 100644
index 26de4c9..0000000
--- a/go/arrow/array/booleanbuilder_test.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/internal/testing/tools"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestBooleanBuilder_AppendValues(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	b := array.NewBooleanBuilder(mem)
-
-	exp := tools.Bools(1, 1, 0, 1, 1, 0, 1, 0)
-	got := make([]bool, len(exp))
-
-	b.AppendValues(exp, nil)
-	a := b.NewBooleanArray()
-	b.Release()
-	for i := 0; i < a.Len(); i++ {
-		got[i] = a.Value(i)
-	}
-	assert.Equal(t, exp, got)
-	a.Release()
-}
-
-func TestBooleanBuilder_Empty(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewBooleanBuilder(mem)
-	defer ab.Release()
-
-	want := tools.Bools(1, 1, 0, 1, 1, 0, 1, 0)
-
-	boolValues := func(a *array.Boolean) []bool {
-		vs := make([]bool, a.Len())
-		for i := range vs {
-			vs[i] = a.Value(i)
-		}
-		return vs
-	}
-
-	ab.AppendValues([]bool{}, nil)
-	a := ab.NewBooleanArray()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(nil, nil)
-	a = ab.NewBooleanArray()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	a = ab.NewBooleanArray()
-	assert.Equal(t, want, boolValues(a))
-	a.Release()
-
-	ab.AppendValues([]bool{}, nil)
-	ab.AppendValues(want, nil)
-	a = ab.NewBooleanArray()
-	assert.Equal(t, want, boolValues(a))
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	ab.AppendValues([]bool{}, nil)
-	a = ab.NewBooleanArray()
-	assert.Equal(t, want, boolValues(a))
-	a.Release()
-}
diff --git a/go/arrow/array/bufferbuilder.go b/go/arrow/array/bufferbuilder.go
deleted file mode 100644
index bcc7153..0000000
--- a/go/arrow/array/bufferbuilder.go
+++ /dev/null
@@ -1,127 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// A bufferBuilder provides common functionality for populating memory with a sequence of type-specific values.
-// Specialized implementations provide type-safe APIs for appending and accessing the memory.
-type bufferBuilder struct {
-	refCount int64
-	mem      memory.Allocator
-	buffer   *memory.Buffer
-	length   int
-	capacity int
-
-	bytes []byte
-}
-
-// Retain increases the reference count by 1.
-// Retain may be called simultaneously from multiple goroutines.
-func (b *bufferBuilder) Retain() {
-	atomic.AddInt64(&b.refCount, 1)
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-// Release may be called simultaneously from multiple goroutines.
-func (b *bufferBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.buffer != nil {
-			b.buffer.Release()
-			b.buffer, b.bytes = nil, nil
-		}
-	}
-}
-
-// Len returns the length of the memory buffer in bytes.
-func (b *bufferBuilder) Len() int { return b.length }
-
-// Cap returns the total number of bytes that can be stored without allocating additional memory.
-func (b *bufferBuilder) Cap() int { return b.capacity }
-
-// Bytes returns a slice of length b.Len().
-// The slice is only valid for use until the next buffer modification. That is, until the next call
-// to Advance, Reset, Finish or any Append function. The slice aliases the buffer content at least until the next
-// buffer modification.
-func (b *bufferBuilder) Bytes() []byte { return b.bytes[:b.length] }
-
-func (b *bufferBuilder) resize(elements int) {
-	if b.buffer == nil {
-		b.buffer = memory.NewResizableBuffer(b.mem)
-	}
-
-	b.buffer.Resize(elements)
-	oldCapacity := b.capacity
-	b.capacity = b.buffer.Cap()
-	b.bytes = b.buffer.Buf()
-
-	if b.capacity > oldCapacity {
-		memory.Set(b.bytes[oldCapacity:], 0)
-	}
-}
-
-// Advance increases the buffer by length and initializes the skipped bytes to zero.
-func (b *bufferBuilder) Advance(length int) {
-	if b.capacity < b.length+length {
-		newCapacity := bitutil.NextPowerOf2(b.length + length)
-		b.resize(newCapacity)
-	}
-	b.length += length
-}
-
-// Append appends the contents of v to the buffer, resizing it if necessary.
-func (b *bufferBuilder) Append(v []byte) {
-	if b.capacity < b.length+len(v) {
-		newCapacity := bitutil.NextPowerOf2(b.length + len(v))
-		b.resize(newCapacity)
-	}
-	b.unsafeAppend(v)
-}
-
-// Reset returns the buffer to an empty state. Reset releases the memory and sets the length and capacity to zero.
-func (b *bufferBuilder) Reset() {
-	if b.buffer != nil {
-		b.buffer.Release()
-	}
-	b.buffer, b.bytes = nil, nil
-	b.capacity, b.length = 0, 0
-}
-
-// Finish returns the accumulated buffer and resets the builder; the caller assumes ownership of the returned buffer.
-func (b *bufferBuilder) Finish() (buffer *memory.Buffer) {
-	if b.length > 0 {
-		b.buffer.ResizeNoShrink(b.length)
-	}
-	buffer = b.buffer
-	b.buffer = nil
-	b.Reset()
-	return
-}
-
-func (b *bufferBuilder) unsafeAppend(data []byte) {
-	copy(b.bytes[b.length:], data)
-	b.length += len(data)
-}
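
bufferBuilder is the unexported base shared by the typed buffer builders
in the files that follow. A package-internal sketch of its lifecycle (the
helper function is hypothetical, for illustration only):

// Hypothetical helper inside package array.
func bufferLifecycleSketch(mem memory.Allocator) {
	bb := bufferBuilder{refCount: 1, mem: mem}
	bb.Append([]byte("abc")) // grows to the next power of two as needed
	_ = bb.Bytes()           // aliases the buffer until the next modification
	buf := bb.Finish()       // caller takes ownership; builder is reset
	buf.Release()
}
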
diff --git a/go/arrow/array/bufferbuilder_byte.go b/go/arrow/array/bufferbuilder_byte.go
deleted file mode 100644
index f5f5445..0000000
--- a/go/arrow/array/bufferbuilder_byte.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import "github.com/apache/arrow/go/arrow/memory"
-
-type byteBufferBuilder struct {
-	bufferBuilder
-}
-
-func newByteBufferBuilder(mem memory.Allocator) *byteBufferBuilder {
-	return &byteBufferBuilder{bufferBuilder: bufferBuilder{refCount: 1, mem: mem}}
-}
-
-func (b *byteBufferBuilder) Values() []byte   { return b.Bytes() }
-func (b *byteBufferBuilder) Value(i int) byte { return b.bytes[i] }
diff --git a/go/arrow/array/bufferbuilder_numeric.gen.go b/go/arrow/array/bufferbuilder_numeric.gen.go
deleted file mode 100644
index 4cdf426..0000000
--- a/go/arrow/array/bufferbuilder_numeric.gen.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Code generated by array/bufferbuilder_numeric.gen.go.tmpl. DO NOT EDIT.
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-type int32BufferBuilder struct {
-	bufferBuilder
-}
-
-func newInt32BufferBuilder(mem memory.Allocator) *int32BufferBuilder {
-	return &int32BufferBuilder{bufferBuilder: bufferBuilder{refCount: 1, mem: mem}}
-}
-
-// AppendValues appends the contents of v to the buffer, growing the buffer as needed.
-func (b *int32BufferBuilder) AppendValues(v []int32) { b.Append(arrow.Int32Traits.CastToBytes(v)) }
-
-// Values returns a slice of length b.Len().
-// The slice is only valid for use until the next buffer modification. That is, until the next call
-// to Advance, Reset, Finish or any Append function. The slice aliases the buffer content at least until the next
-// buffer modification.
-func (b *int32BufferBuilder) Values() []int32 { return arrow.Int32Traits.CastFromBytes(b.Bytes()) }
-
-// Value returns the int32 element at the index i. Value will panic if i is negative or ≥ Len.
-func (b *int32BufferBuilder) Value(i int) int32 { return b.Values()[i] }
-
-// Len returns the number of int32 elements in the buffer.
-func (b *int32BufferBuilder) Len() int { return b.length / arrow.Int32SizeBytes }
-
-// AppendValue appends v to the buffer, growing the buffer as needed.
-func (b *int32BufferBuilder) AppendValue(v int32) {
-	if b.capacity < b.length+arrow.Int32SizeBytes {
-		newCapacity := bitutil.NextPowerOf2(b.length + arrow.Int32SizeBytes)
-		b.resize(newCapacity)
-	}
-	arrow.Int32Traits.PutValue(b.bytes[b.length:], v)
-	b.length += arrow.Int32SizeBytes
-}
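
This generated int32 builder is what BinaryBuilder uses for its offsets
buffer. A package-internal sketch of that offsets pattern (the helper
function is hypothetical, for illustration only):

// Hypothetical helper inside package array.
func offsetsSketch(mem memory.Allocator) {
	offsets := newInt32BufferBuilder(mem)
	offsets.AppendValue(0)  // start of the first value
	offsets.AppendValue(1)  // "a" ends at byte 1
	offsets.AppendValue(3)  // "bc" ends at byte 3
	_ = offsets.Values()    // [0 1 3]
	buf := offsets.Finish() // ownership moves to the caller; builder resets
	buf.Release()
}
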
diff --git a/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl b/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl
deleted file mode 100644
index a0ff764..0000000
--- a/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl
+++ /dev/null
@@ -1,61 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-{{range .In}}
-{{$TypeNamePrefix := .name}}
-{{if .Opt.BufferBuilder}}
-type {{$TypeNamePrefix}}BufferBuilder struct {
-	bufferBuilder
-}
-
-func new{{.Name}}BufferBuilder(mem memory.Allocator) *{{$TypeNamePrefix}}BufferBuilder {
-	return &{{$TypeNamePrefix}}BufferBuilder{bufferBuilder:bufferBuilder{refCount: 1, mem:mem}}
-}
-
-// AppendValues appends the contents of v to the buffer, growing the buffer as needed.
-func (b *{{$TypeNamePrefix}}BufferBuilder) AppendValues(v []{{.Type}}) { b.Append(arrow.{{.Name}}Traits.CastToBytes(v)) }
-
-// Values returns a slice of length b.Len().
-// The slice is only valid for use until the next buffer modification. That is, until the next call
-// to Advance, Reset, Finish or any Append function. The slice aliases the buffer content at least until the next
-// buffer modification.
-func (b *{{$TypeNamePrefix}}BufferBuilder) Values() []{{.Type}}           { return arrow.{{.Name}}Traits.CastFromBytes(b.Bytes()) }
-
-// Value returns the {{.Type}} element at the index i. Value will panic if i is negative or ≥ Len.
-func (b *{{$TypeNamePrefix}}BufferBuilder) Value(i int) {{.Type}}         { return b.Values()[i] }
-
-// Len returns the number of {{.Type}} elements in the buffer.
-func (b *{{$TypeNamePrefix}}BufferBuilder) Len() int                      { return b.length/arrow.{{.Name}}SizeBytes }
-
-// AppendValue appends v to the buffer, growing the buffer as needed.
-func (b *{{$TypeNamePrefix}}BufferBuilder) AppendValue(v {{.Type}}) {
-	if b.capacity < b.length+arrow.{{.Name}}SizeBytes {
-		newCapacity := bitutil.NextPowerOf2(b.length + arrow.{{.Name}}SizeBytes)
-		b.resize(newCapacity)
-	}
-	arrow.{{.Name}}Traits.PutValue(b.bytes[b.length:], v)
-	b.length+=arrow.{{.Name}}SizeBytes
-}
-{{end}}
-{{end}}
diff --git a/go/arrow/array/bufferbuilder_numeric_test.go b/go/arrow/array/bufferbuilder_numeric_test.go
deleted file mode 100644
index 7834679..0000000
--- a/go/arrow/array/bufferbuilder_numeric_test.go
+++ /dev/null
@@ -1,106 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"testing"
-	"unsafe"
-
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/apache/arrow/go/arrow/endian"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestInt32BufferBuilder(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	bb := newInt32BufferBuilder(mem)
-	exp := []int32{0x01020304, 0x05060708, 0x090a0b0c, 0x0d0e0f01, 0x02030405, 0x06070809}
-	bb.AppendValues(exp[:3])
-	bb.AppendValues(exp[3:])
-
-	var expBuf []byte
-	if endian.IsBigEndian {
-		expBuf = []byte{
-			0x01, 0x02, 0x03, 0x04,
-			0x05, 0x06, 0x07, 0x08,
-			0x09, 0x0a, 0x0b, 0x0c,
-			0x0d, 0x0e, 0x0f, 0x01,
-			0x02, 0x03, 0x04, 0x05,
-			0x06, 0x07, 0x08, 0x09,
-		}
-	} else {
-		expBuf = []byte{
-			0x04, 0x03, 0x02, 0x01,
-			0x08, 0x07, 0x06, 0x05,
-			0x0c, 0x0b, 0x0a, 0x09,
-			0x01, 0x0f, 0x0e, 0x0d,
-			0x05, 0x04, 0x03, 0x02,
-			0x09, 0x08, 0x07, 0x06,
-		}
-	}
-	assert.Equal(t, expBuf, bb.Bytes(), "unexpected byte values")
-	assert.Equal(t, exp, bb.Values(), "unexpected int32 values")
-	assert.Equal(t, len(exp), bb.Len(), "unexpected Len()")
-
-	buflen := bb.Len()
-	bfr := bb.Finish()
-	assert.Equal(t, buflen*int(unsafe.Sizeof(int32(0))), bfr.Len(), "Buffer was not resized")
-	assert.Len(t, bfr.Bytes(), bfr.Len(), "Buffer.Bytes() != Buffer.Len()")
-	bfr.Release()
-
-	assert.Len(t, bb.Bytes(), 0, "BufferBuilder was not reset after Finish")
-	assert.Zero(t, bb.Len(), "BufferBuilder was not reset after Finish")
-	bb.Release()
-}
-
-func TestInt32BufferBuilder_AppendValue(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	bb := newInt32BufferBuilder(mem)
-	exp := []int32{0x01020304, 0x05060708, 0x090a0b0c, 0x0d0e0f01, 0x02030405, 0x06070809}
-	for _, v := range exp {
-		bb.AppendValue(v)
-	}
-
-	var expBuf []byte
-	if endian.IsBigEndian {
-		expBuf = []byte{
-			0x01, 0x02, 0x03, 0x04,
-			0x05, 0x06, 0x07, 0x08,
-			0x09, 0x0a, 0x0b, 0x0c,
-			0x0d, 0x0e, 0x0f, 0x01,
-			0x02, 0x03, 0x04, 0x05,
-			0x06, 0x07, 0x08, 0x09,
-		}
-	} else {
-		expBuf = []byte{
-			0x04, 0x03, 0x02, 0x01,
-			0x08, 0x07, 0x06, 0x05,
-			0x0c, 0x0b, 0x0a, 0x09,
-			0x01, 0x0f, 0x0e, 0x0d,
-			0x05, 0x04, 0x03, 0x02,
-			0x09, 0x08, 0x07, 0x06,
-		}
-	}
-	assert.Equal(t, expBuf, bb.Bytes(), "unexpected byte values")
-	assert.Equal(t, exp, bb.Values(), "unexpected int32 values")
-	assert.Equal(t, len(exp), bb.Len(), "unexpected Len()")
-	bb.Release()
-}
diff --git a/go/arrow/array/builder.go b/go/arrow/array/builder.go
deleted file mode 100644
index 0066e1d..0000000
--- a/go/arrow/array/builder.go
+++ /dev/null
@@ -1,289 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"fmt"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-const (
-	minBuilderCapacity = 1 << 5
-)
-
-// Builder provides an interface to build arrow arrays.
-type Builder interface {
-	// Retain increases the reference count by 1.
-	// Retain may be called simultaneously from multiple goroutines.
-	Retain()
-
-	// Release decreases the reference count by 1.
-	Release()
-
-	// Len returns the number of elements in the array builder.
-	Len() int
-
-	// Cap returns the total number of elements that can be stored
-	// without allocating additional memory.
-	Cap() int
-
-	// NullN returns the number of null values in the array builder.
-	NullN() int
-
-	// AppendNull adds a new null value to the array being built.
-	AppendNull()
-
-	// Reserve ensures there is enough space for appending n elements
-	// by checking the capacity and calling Resize if necessary.
-	Reserve(n int)
-
-	// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-	// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-	Resize(n int)
-
-	// NewArray creates a new array from the memory buffers used
-	// by the builder and resets the Builder so it can be used to build
-	// a new array.
-	NewArray() Interface
-
-	init(capacity int)
-	resize(newBits int, init func(int))
-}
-
-// builder provides common functionality for managing the validity bitmap (nulls) when building arrays.
-type builder struct {
-	refCount   int64
-	mem        memory.Allocator
-	nullBitmap *memory.Buffer
-	nulls      int
-	length     int
-	capacity   int
-}
-
-// Retain increases the reference count by 1.
-// Retain may be called simultaneously from multiple goroutines.
-func (b *builder) Retain() {
-	atomic.AddInt64(&b.refCount, 1)
-}
-
-// Len returns the number of elements in the array builder.
-func (b *builder) Len() int { return b.length }
-
-// Cap returns the total number of elements that can be stored without allocating additional memory.
-func (b *builder) Cap() int { return b.capacity }
-
-// NullN returns the number of null values in the array builder.
-func (b *builder) NullN() int { return b.nulls }
-
-func (b *builder) init(capacity int) {
-	toAlloc := bitutil.CeilByte(capacity) / 8
-	b.nullBitmap = memory.NewResizableBuffer(b.mem)
-	b.nullBitmap.Resize(toAlloc)
-	b.capacity = capacity
-	memory.Set(b.nullBitmap.Buf(), 0)
-}
-
-func (b *builder) reset() {
-	if b.nullBitmap != nil {
-		b.nullBitmap.Release()
-		b.nullBitmap = nil
-	}
-
-	b.nulls = 0
-	b.length = 0
-	b.capacity = 0
-}
-
-func (b *builder) resize(newBits int, init func(int)) {
-	if b.nullBitmap == nil {
-		init(newBits)
-		return
-	}
-
-	newBytesN := bitutil.CeilByte(newBits) / 8
-	oldBytesN := b.nullBitmap.Len()
-	b.nullBitmap.Resize(newBytesN)
-	b.capacity = newBits
-	if oldBytesN < newBytesN {
-		// TODO(sgc): necessary?
-		memory.Set(b.nullBitmap.Buf()[oldBytesN:], 0)
-	}
-	if newBits < b.length {
-		b.length = newBits
-		b.nulls = newBits - bitutil.CountSetBits(b.nullBitmap.Buf(), 0, newBits)
-	}
-}
-
-func (b *builder) reserve(elements int, resize func(int)) {
-	if b.length+elements > b.capacity {
-		newCap := bitutil.NextPowerOf2(b.length + elements)
-		resize(newCap)
-	}
-}
-
-// unsafeAppendBoolsToBitmap appends the contents of valid to the validity bitmap.
-// As an optimization, if the valid slice is empty, the next length bits will be set to valid (not null).
-func (b *builder) unsafeAppendBoolsToBitmap(valid []bool, length int) {
-	if len(valid) == 0 {
-		b.unsafeSetValid(length)
-		return
-	}
-
-	byteOffset := b.length / 8
-	bitOffset := byte(b.length % 8)
-	nullBitmap := b.nullBitmap.Bytes()
-	bitSet := nullBitmap[byteOffset]
-
-	for _, v := range valid {
-		if bitOffset == 8 {
-			bitOffset = 0
-			nullBitmap[byteOffset] = bitSet
-			byteOffset++
-			bitSet = nullBitmap[byteOffset]
-		}
-
-		if v {
-			bitSet |= bitutil.BitMask[bitOffset]
-		} else {
-			bitSet &= bitutil.FlippedBitMask[bitOffset]
-			b.nulls++
-		}
-		bitOffset++
-	}
-
-	if bitOffset != 0 {
-		nullBitmap[byteOffset] = bitSet
-	}
-	b.length += len(valid)
-}
-
-// unsafeSetValid sets the next length bits to valid in the validity bitmap.
-func (b *builder) unsafeSetValid(length int) {
-	padToByte := min(8-(b.length%8), length)
-	if padToByte == 8 {
-		padToByte = 0
-	}
-	bits := b.nullBitmap.Bytes()
-	for i := b.length; i < b.length+padToByte; i++ {
-		bitutil.SetBit(bits, i)
-	}
-
-	start := (b.length + padToByte) / 8
-	fastLength := (length - padToByte) / 8
-	memory.Set(bits[start:start+fastLength], 0xff)
-
-	newLength := b.length + length
-	// trailing bytes
-	for i := b.length + padToByte + (fastLength * 8); i < newLength; i++ {
-		bitutil.SetBit(bits, i)
-	}
-
-	b.length = newLength
-}
-
-func (b *builder) UnsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-func NewBuilder(mem memory.Allocator, dtype arrow.DataType) Builder {
-	// FIXME(sbinet): use a type switch on dtype instead?
-	switch dtype.ID() {
-	case arrow.NULL:
-		return NewNullBuilder(mem)
-	case arrow.BOOL:
-		return NewBooleanBuilder(mem)
-	case arrow.UINT8:
-		return NewUint8Builder(mem)
-	case arrow.INT8:
-		return NewInt8Builder(mem)
-	case arrow.UINT16:
-		return NewUint16Builder(mem)
-	case arrow.INT16:
-		return NewInt16Builder(mem)
-	case arrow.UINT32:
-		return NewUint32Builder(mem)
-	case arrow.INT32:
-		return NewInt32Builder(mem)
-	case arrow.UINT64:
-		return NewUint64Builder(mem)
-	case arrow.INT64:
-		return NewInt64Builder(mem)
-	case arrow.FLOAT16:
-		return NewFloat16Builder(mem)
-	case arrow.FLOAT32:
-		return NewFloat32Builder(mem)
-	case arrow.FLOAT64:
-		return NewFloat64Builder(mem)
-	case arrow.STRING:
-		return NewStringBuilder(mem)
-	case arrow.BINARY:
-		return NewBinaryBuilder(mem, arrow.BinaryTypes.Binary)
-	case arrow.FIXED_SIZE_BINARY:
-		typ := dtype.(*arrow.FixedSizeBinaryType)
-		return NewFixedSizeBinaryBuilder(mem, typ)
-	case arrow.DATE32:
-		return NewDate32Builder(mem)
-	case arrow.DATE64:
-		return NewDate64Builder(mem)
-	case arrow.TIMESTAMP:
-		typ := dtype.(*arrow.TimestampType)
-		return NewTimestampBuilder(mem, typ)
-	case arrow.TIME32:
-		typ := dtype.(*arrow.Time32Type)
-		return NewTime32Builder(mem, typ)
-	case arrow.TIME64:
-		typ := dtype.(*arrow.Time64Type)
-		return NewTime64Builder(mem, typ)
-	case arrow.INTERVAL:
-		switch dtype.(type) {
-		case *arrow.DayTimeIntervalType:
-			return NewDayTimeIntervalBuilder(mem)
-		case *arrow.MonthIntervalType:
-			return NewMonthIntervalBuilder(mem)
-		}
-	case arrow.DECIMAL:
-		if typ, ok := dtype.(*arrow.Decimal128Type); ok {
-			return NewDecimal128Builder(mem, typ)
-		}
-	case arrow.LIST:
-		typ := dtype.(*arrow.ListType)
-		return NewListBuilder(mem, typ.Elem())
-	case arrow.STRUCT:
-		typ := dtype.(*arrow.StructType)
-		return NewStructBuilder(mem, typ)
-	case arrow.UNION:
-	case arrow.DICTIONARY:
-	case arrow.MAP:
-	case arrow.EXTENSION:
-	case arrow.FIXED_SIZE_LIST:
-		typ := dtype.(*arrow.FixedSizeListType)
-		return NewFixedSizeListBuilder(mem, typ.Len(), typ.Elem())
-	case arrow.DURATION:
-		typ := dtype.(*arrow.DurationType)
-		return NewDurationBuilder(mem, typ)
-	}
-	panic(fmt.Errorf("arrow/array: unsupported builder for %T", dtype))
-}
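
NewBuilder is the factory the rest of the package dispatches through; for
a BOOL data type it returns the BooleanBuilder defined earlier. A short
sketch, assuming the pre-removal import paths:

package main

import (
	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()

	// NewBuilder dispatches on dtype.ID(); arrow.FixedWidthTypes.Boolean
	// yields a *array.BooleanBuilder behind the Builder interface.
	bldr := array.NewBuilder(mem, arrow.FixedWidthTypes.Boolean)
	defer bldr.Release()

	bldr.(*array.BooleanBuilder).AppendValues([]bool{true, false}, nil)

	arr := bldr.NewArray()
	defer arr.Release()
}
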
diff --git a/go/arrow/array/builder_test.go b/go/arrow/array/builder_test.go
deleted file mode 100644
index 76dfaee..0000000
--- a/go/arrow/array/builder_test.go
+++ /dev/null
@@ -1,83 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow/internal/testing/tools"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestBuilder_Init(t *testing.T) {
-	type exp struct{ size int }
-	tests := []struct {
-		name string
-		cap  int
-
-		exp exp
-	}{
-		{"07 bits", 07, exp{size: 1}},
-		{"19 bits", 19, exp{size: 3}},
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			ab := &builder{mem: memory.NewGoAllocator()}
-			ab.init(test.cap)
-			assert.Equal(t, test.cap, ab.Cap(), "invalid capacity")
-			assert.Equal(t, test.exp.size, ab.nullBitmap.Len(), "invalid length")
-		})
-	}
-}
-
-func TestBuilder_UnsafeSetValid(t *testing.T) {
-	ab := &builder{mem: memory.NewGoAllocator()}
-	ab.init(32)
-	ab.unsafeAppendBoolsToBitmap(tools.Bools(0, 0, 0, 0, 0), 5)
-	assert.Equal(t, 5, ab.Len())
-	assert.Equal(t, []byte{0, 0, 0, 0}, ab.nullBitmap.Bytes())
-
-	ab.unsafeSetValid(17)
-	assert.Equal(t, []byte{0xe0, 0xff, 0x3f, 0}, ab.nullBitmap.Bytes())
-}
-
-func TestBuilder_resize(t *testing.T) {
-	b := &builder{mem: memory.NewGoAllocator()}
-	n := 64
-
-	b.init(n)
-	assert.Equal(t, n, b.Cap())
-	assert.Equal(t, 0, b.Len())
-
-	b.UnsafeAppendBoolToBitmap(true)
-	for i := 1; i < n; i++ {
-		b.UnsafeAppendBoolToBitmap(false)
-	}
-	assert.Equal(t, n, b.Cap())
-	assert.Equal(t, n, b.Len())
-	assert.Equal(t, n-1, b.NullN())
-
-	n = 5
-	b.resize(n, b.init)
-	assert.Equal(t, n, b.Len())
-	assert.Equal(t, n-1, b.NullN())
-
-	b.resize(32, b.init)
-	assert.Equal(t, n, b.Len())
-	assert.Equal(t, n-1, b.NullN())
-}
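
The bitmap expectation in TestBuilder_UnsafeSetValid above decodes as
follows: the five appended nulls leave bits 0-4 clear, then
unsafeSetValid(17) sets bits 5 through 21 (bitmaps are least-significant
bit first), giving bytes 0xe0 (bits 5-7), 0xff (bits 8-15), 0x3f
(bits 16-21), and 0x00.
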
diff --git a/go/arrow/array/compare.go b/go/arrow/array/compare.go
deleted file mode 100644
index 537630d..0000000
--- a/go/arrow/array/compare.go
+++ /dev/null
@@ -1,474 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"math"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/float16"
-	"golang.org/x/xerrors"
-)
-
-// RecordEqual reports whether the two provided records are equal.
-func RecordEqual(left, right Record) bool {
-	switch {
-	case left.NumCols() != right.NumCols():
-		return false
-	case left.NumRows() != right.NumRows():
-		return false
-	}
-
-	for i := range left.Columns() {
-		lc := left.Column(i)
-		rc := right.Column(i)
-		if !ArrayEqual(lc, rc) {
-			return false
-		}
-	}
-	return true
-}
-
-// RecordApproxEqual reports whether the two provided records are approximately equal.
-// For non-floating point columns, it is equivalent to RecordEqual.
-func RecordApproxEqual(left, right Record, opts ...EqualOption) bool {
-	switch {
-	case left.NumCols() != right.NumCols():
-		return false
-	case left.NumRows() != right.NumRows():
-		return false
-	}
-
-	opt := newEqualOption(opts...)
-
-	for i := range left.Columns() {
-		lc := left.Column(i)
-		rc := right.Column(i)
-		if !arrayApproxEqual(lc, rc, opt) {
-			return false
-		}
-	}
-	return true
-}
-
-// ArrayEqual reports whether the two provided arrays are equal.
-func ArrayEqual(left, right Interface) bool {
-	switch {
-	case !baseArrayEqual(left, right):
-		return false
-	case left.Len() == 0:
-		return true
-	case left.NullN() == left.Len():
-		return true
-	}
-
-	// at this point, we know both arrays have same type, same length, same number of nulls
-	// and nulls at the same place.
-	// compare the values.
-
-	switch l := left.(type) {
-	case *Null:
-		return true
-	case *Boolean:
-		r := right.(*Boolean)
-		return arrayEqualBoolean(l, r)
-	case *FixedSizeBinary:
-		r := right.(*FixedSizeBinary)
-		return arrayEqualFixedSizeBinary(l, r)
-	case *Binary:
-		r := right.(*Binary)
-		return arrayEqualBinary(l, r)
-	case *String:
-		r := right.(*String)
-		return arrayEqualString(l, r)
-	case *Int8:
-		r := right.(*Int8)
-		return arrayEqualInt8(l, r)
-	case *Int16:
-		r := right.(*Int16)
-		return arrayEqualInt16(l, r)
-	case *Int32:
-		r := right.(*Int32)
-		return arrayEqualInt32(l, r)
-	case *Int64:
-		r := right.(*Int64)
-		return arrayEqualInt64(l, r)
-	case *Uint8:
-		r := right.(*Uint8)
-		return arrayEqualUint8(l, r)
-	case *Uint16:
-		r := right.(*Uint16)
-		return arrayEqualUint16(l, r)
-	case *Uint32:
-		r := right.(*Uint32)
-		return arrayEqualUint32(l, r)
-	case *Uint64:
-		r := right.(*Uint64)
-		return arrayEqualUint64(l, r)
-	case *Float16:
-		r := right.(*Float16)
-		return arrayEqualFloat16(l, r)
-	case *Float32:
-		r := right.(*Float32)
-		return arrayEqualFloat32(l, r)
-	case *Float64:
-		r := right.(*Float64)
-		return arrayEqualFloat64(l, r)
-	case *Decimal128:
-		r := right.(*Decimal128)
-		return arrayEqualDecimal128(l, r)
-	case *Date32:
-		r := right.(*Date32)
-		return arrayEqualDate32(l, r)
-	case *Date64:
-		r := right.(*Date64)
-		return arrayEqualDate64(l, r)
-	case *Time32:
-		r := right.(*Time32)
-		return arrayEqualTime32(l, r)
-	case *Time64:
-		r := right.(*Time64)
-		return arrayEqualTime64(l, r)
-	case *Timestamp:
-		r := right.(*Timestamp)
-		return arrayEqualTimestamp(l, r)
-	case *List:
-		r := right.(*List)
-		return arrayEqualList(l, r)
-	case *FixedSizeList:
-		r := right.(*FixedSizeList)
-		return arrayEqualFixedSizeList(l, r)
-	case *Struct:
-		r := right.(*Struct)
-		return arrayEqualStruct(l, r)
-	case *MonthInterval:
-		r := right.(*MonthInterval)
-		return arrayEqualMonthInterval(l, r)
-	case *DayTimeInterval:
-		r := right.(*DayTimeInterval)
-		return arrayEqualDayTimeInterval(l, r)
-	case *Duration:
-		r := right.(*Duration)
-		return arrayEqualDuration(l, r)
-
-	default:
-		panic(xerrors.Errorf("arrow/array: unknown array type %T", l))
-	}
-}
-
-// ArraySliceEqual reports whether slices left[lbeg:lend] and right[rbeg:rend] are equal.
-func ArraySliceEqual(left Interface, lbeg, lend int64, right Interface, rbeg, rend int64) bool {
-	l := NewSlice(left, lbeg, lend)
-	defer l.Release()
-	r := NewSlice(right, rbeg, rend)
-	defer r.Release()
-
-	return ArrayEqual(l, r)
-}
-
-const defaultAbsoluteTolerance = 1e-5
-
-type equalOption struct {
-	atol   float64 // absolute tolerance
-	nansEq bool    // whether NaNs are considered equal.
-}
-
-func (eq equalOption) f16(f1, f2 float16.Num) bool {
-	v1 := float64(f1.Float32())
-	v2 := float64(f2.Float32())
-	switch {
-	case eq.nansEq:
-		return math.Abs(v1-v2) <= eq.atol || (math.IsNaN(v1) && math.IsNaN(v2))
-	default:
-		return math.Abs(v1-v2) <= eq.atol
-	}
-}
-
-func (eq equalOption) f32(f1, f2 float32) bool {
-	v1 := float64(f1)
-	v2 := float64(f2)
-	switch {
-	case eq.nansEq:
-		return math.Abs(v1-v2) <= eq.atol || (math.IsNaN(v1) && math.IsNaN(v2))
-	default:
-		return math.Abs(v1-v2) <= eq.atol
-	}
-}
-
-func (eq equalOption) f64(v1, v2 float64) bool {
-	switch {
-	case eq.nansEq:
-		return math.Abs(v1-v2) <= eq.atol || (math.IsNaN(v1) && math.IsNaN(v2))
-	default:
-		return math.Abs(v1-v2) <= eq.atol
-	}
-}
-
-func newEqualOption(opts ...EqualOption) equalOption {
-	eq := equalOption{
-		atol:   defaultAbsoluteTolerance,
-		nansEq: false,
-	}
-	for _, opt := range opts {
-		opt(&eq)
-	}
-
-	return eq
-}
-
-// EqualOption is a functional option type used to configure how Records and Arrays are compared.
-type EqualOption func(*equalOption)
-
-// WithNaNsEqual configures the comparison functions so that NaNs are considered equal.
-func WithNaNsEqual(v bool) EqualOption {
-	return func(o *equalOption) {
-		o.nansEq = v
-	}
-}
-
-// WithAbsTolerance configures the comparison functions so that 2 floating point values
-// v1 and v2 are considered equal if |v1-v2| <= atol.
-func WithAbsTolerance(atol float64) EqualOption {
-	return func(o *equalOption) {
-		o.atol = atol
-	}
-}
-
-// ArrayApproxEqual reports whether the two provided arrays are approximately equal.
-// For non-floating point arrays, it is equivalent to ArrayEqual.
-func ArrayApproxEqual(left, right Interface, opts ...EqualOption) bool {
-	opt := newEqualOption(opts...)
-	return arrayApproxEqual(left, right, opt)
-}
-
-func arrayApproxEqual(left, right Interface, opt equalOption) bool {
-	switch {
-	case !baseArrayEqual(left, right):
-		return false
-	case left.Len() == 0:
-		return true
-	case left.NullN() == left.Len():
-		return true
-	}
-
-	// at this point, we know both arrays have same type, same length, same number of nulls
-	// and nulls at the same place.
-	// compare the values.
-
-	switch l := left.(type) {
-	case *Null:
-		return true
-	case *Boolean:
-		r := right.(*Boolean)
-		return arrayEqualBoolean(l, r)
-	case *FixedSizeBinary:
-		r := right.(*FixedSizeBinary)
-		return arrayEqualFixedSizeBinary(l, r)
-	case *Binary:
-		r := right.(*Binary)
-		return arrayEqualBinary(l, r)
-	case *String:
-		r := right.(*String)
-		return arrayEqualString(l, r)
-	case *Int8:
-		r := right.(*Int8)
-		return arrayEqualInt8(l, r)
-	case *Int16:
-		r := right.(*Int16)
-		return arrayEqualInt16(l, r)
-	case *Int32:
-		r := right.(*Int32)
-		return arrayEqualInt32(l, r)
-	case *Int64:
-		r := right.(*Int64)
-		return arrayEqualInt64(l, r)
-	case *Uint8:
-		r := right.(*Uint8)
-		return arrayEqualUint8(l, r)
-	case *Uint16:
-		r := right.(*Uint16)
-		return arrayEqualUint16(l, r)
-	case *Uint32:
-		r := right.(*Uint32)
-		return arrayEqualUint32(l, r)
-	case *Uint64:
-		r := right.(*Uint64)
-		return arrayEqualUint64(l, r)
-	case *Float16:
-		r := right.(*Float16)
-		return arrayApproxEqualFloat16(l, r, opt)
-	case *Float32:
-		r := right.(*Float32)
-		return arrayApproxEqualFloat32(l, r, opt)
-	case *Float64:
-		r := right.(*Float64)
-		return arrayApproxEqualFloat64(l, r, opt)
-	case *Decimal128:
-		r := right.(*Decimal128)
-		return arrayEqualDecimal128(l, r)
-	case *Date32:
-		r := right.(*Date32)
-		return arrayEqualDate32(l, r)
-	case *Date64:
-		r := right.(*Date64)
-		return arrayEqualDate64(l, r)
-	case *Time32:
-		r := right.(*Time32)
-		return arrayEqualTime32(l, r)
-	case *Time64:
-		r := right.(*Time64)
-		return arrayEqualTime64(l, r)
-	case *Timestamp:
-		r := right.(*Timestamp)
-		return arrayEqualTimestamp(l, r)
-	case *List:
-		r := right.(*List)
-		return arrayApproxEqualList(l, r, opt)
-	case *FixedSizeList:
-		r := right.(*FixedSizeList)
-		return arrayApproxEqualFixedSizeList(l, r, opt)
-	case *Struct:
-		r := right.(*Struct)
-		return arrayApproxEqualStruct(l, r, opt)
-	case *MonthInterval:
-		r := right.(*MonthInterval)
-		return arrayEqualMonthInterval(l, r)
-	case *DayTimeInterval:
-		r := right.(*DayTimeInterval)
-		return arrayEqualDayTimeInterval(l, r)
-	case *Duration:
-		r := right.(*Duration)
-		return arrayEqualDuration(l, r)
-
-	default:
-		panic(xerrors.Errorf("arrow/array: unknown array type %T", l))
-	}
-
-	return false
-}
-
-func baseArrayEqual(left, right Interface) bool {
-	switch {
-	case left.Len() != right.Len():
-		return false
-	case left.NullN() != right.NullN():
-		return false
-	case !arrow.TypeEqual(left.DataType(), right.DataType()): // We do not check for metadata as in the C++ implementation.
-		return false
-	case !validityBitmapEqual(left, right):
-		return false
-	}
-	return true
-}
-
-func validityBitmapEqual(left, right Interface) bool {
-	// TODO(alexandreyc): make it faster by comparing byte slices of the validity bitmap?
-	n := left.Len()
-	if n != right.Len() {
-		return false
-	}
-	for i := 0; i < n; i++ {
-		if left.IsNull(i) != right.IsNull(i) {
-			return false
-		}
-	}
-	return true
-}
-
-func arrayApproxEqualFloat16(left, right *Float16, opt equalOption) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if !opt.f16(left.Value(i), right.Value(i)) {
-			return false
-		}
-	}
-	return true
-}
-
-func arrayApproxEqualFloat32(left, right *Float32, opt equalOption) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if !opt.f32(left.Value(i), right.Value(i)) {
-			return false
-		}
-	}
-	return true
-}
-
-func arrayApproxEqualFloat64(left, right *Float64, opt equalOption) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if !opt.f64(left.Value(i), right.Value(i)) {
-			return false
-		}
-	}
-	return true
-}
-
-func arrayApproxEqualList(left, right *List, opt equalOption) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		o := func() bool {
-			l := left.newListValue(i)
-			defer l.Release()
-			r := right.newListValue(i)
-			defer r.Release()
-			return arrayApproxEqual(l, r, opt)
-		}()
-		if !o {
-			return false
-		}
-	}
-	return true
-}
-
-func arrayApproxEqualFixedSizeList(left, right *FixedSizeList, opt equalOption) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		o := func() bool {
-			l := left.newListValue(i)
-			defer l.Release()
-			r := right.newListValue(i)
-			defer r.Release()
-			return arrayApproxEqual(l, r, opt)
-		}()
-		if !o {
-			return false
-		}
-	}
-	return true
-}
-
-func arrayApproxEqualStruct(left, right *Struct, opt equalOption) bool {
-	for i, lf := range left.fields {
-		rf := right.fields[i]
-		if !arrayApproxEqual(lf, rf, opt) {
-			return false
-		}
-	}
-	return true
-}
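
For reference, the removed comparison API composed as functional options. A minimal usage sketch, assuming the pre-removal import paths:

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        mem := memory.NewGoAllocator()

        b := array.NewFloat64Builder(mem)
        defer b.Release()

        b.AppendValues([]float64{1, 2, 3}, nil)
        a1 := b.NewFloat64Array()
        defer a1.Release()

        b.AppendValues([]float64{1, 2, 3.05}, nil) // builder resets after NewFloat64Array
        a2 := b.NewFloat64Array()
        defer a2.Release()

        fmt.Println(array.ArrayEqual(a1, a2)) // false: exact comparison
        fmt.Println(array.ArrayApproxEqual(a1, a2,
            array.WithAbsTolerance(0.1))) // true: |3.05-3| <= 0.1
        fmt.Println(array.ArrayApproxEqual(a1, a2,
            array.WithNaNsEqual(true))) // false: default atol is 1e-5
    }

With only WithNaNsEqual set, the default absolute tolerance of 1e-5 still applies, which is why the last comparison fails.
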
diff --git a/go/arrow/array/compare_test.go b/go/arrow/array/compare_test.go
deleted file mode 100644
index 3ed326b..0000000
--- a/go/arrow/array/compare_test.go
+++ /dev/null
@@ -1,531 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"fmt"
-	"math"
-	"testing"
-
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/float16"
-	"github.com/apache/arrow/go/arrow/internal/arrdata"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func TestArrayEqual(t *testing.T) {
-	for name, recs := range arrdata.Records {
-		t.Run(name, func(t *testing.T) {
-			rec := recs[0]
-			schema := rec.Schema()
-			for i, col := range rec.Columns() {
-				t.Run(schema.Field(i).Name, func(t *testing.T) {
-					arr := col
-					if !array.ArrayEqual(arr, arr) {
-						t.Fatalf("identical arrays should compare equal:\narray=%v", arr)
-					}
-					sub1 := array.NewSlice(arr, 1, int64(arr.Len()))
-					defer sub1.Release()
-
-					sub2 := array.NewSlice(arr, 0, int64(arr.Len()-1))
-					defer sub2.Release()
-
-					if array.ArrayEqual(sub1, sub2) && name != "nulls" {
-						t.Fatalf("non-identical arrays should not compare equal:\nsub1=%v\nsub2=%v\narrf=%v\n", sub1, sub2, arr)
-					}
-				})
-			}
-		})
-	}
-}
-
-func TestArraySliceEqual(t *testing.T) {
-	for name, recs := range arrdata.Records {
-		t.Run(name, func(t *testing.T) {
-			rec := recs[0]
-			schema := rec.Schema()
-			for i, col := range rec.Columns() {
-				t.Run(schema.Field(i).Name, func(t *testing.T) {
-					arr := col
-					if !array.ArraySliceEqual(
-						arr, 0, int64(arr.Len()),
-						arr, 0, int64(arr.Len()),
-					) {
-						t.Fatalf("identical slices should compare equal:\narray=%v", arr)
-					}
-					sub1 := array.NewSlice(arr, 1, int64(arr.Len()))
-					defer sub1.Release()
-
-					sub2 := array.NewSlice(arr, 0, int64(arr.Len()-1))
-					defer sub2.Release()
-
-					if array.ArraySliceEqual(sub1, 0, int64(sub1.Len()), sub2, 0, int64(sub2.Len())) && name != "nulls" {
-						t.Fatalf("non-identical slices should not compare equal:\nsub1=%v\nsub2=%v\narrf=%v\n", sub1, sub2, arr)
-					}
-				})
-			}
-		})
-	}
-}
-
-func TestArrayApproxEqual(t *testing.T) {
-	for name, recs := range arrdata.Records {
-		t.Run(name, func(t *testing.T) {
-			rec := recs[0]
-			schema := rec.Schema()
-			for i, col := range rec.Columns() {
-				t.Run(schema.Field(i).Name, func(t *testing.T) {
-					arr := col
-					if !array.ArrayApproxEqual(arr, arr) {
-						t.Fatalf("identical arrays should compare equal:\narray=%v", arr)
-					}
-					sub1 := array.NewSlice(arr, 1, int64(arr.Len()))
-					defer sub1.Release()
-
-					sub2 := array.NewSlice(arr, 0, int64(arr.Len()-1))
-					defer sub2.Release()
-
-					if array.ArrayApproxEqual(sub1, sub2) && name != "nulls" {
-						t.Fatalf("non-identical arrays should not compare equal:\nsub1=%v\nsub2=%v\narrf=%v\n", sub1, sub2, arr)
-					}
-				})
-			}
-		})
-	}
-}
-
-func TestArrayApproxEqualFloats(t *testing.T) {
-	f16sFrom := func(vs []float64) []float16.Num {
-		o := make([]float16.Num, len(vs))
-		for i, v := range vs {
-			o[i] = float16.New(float32(v))
-		}
-		return o
-	}
-
-	for _, tc := range []struct {
-		name string
-		a1   interface{}
-		a2   interface{}
-		opts []array.EqualOption
-		want bool
-	}{
-		{
-			name: "f16",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, 6}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 5, 6}),
-			want: true,
-		},
-		{
-			name: "f16-no-tol",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, 6}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 5, 7}),
-			want: false,
-		},
-		{
-			name: "f16-tol-ok",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, 6}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 5, 7}),
-			opts: []array.EqualOption{array.WithAbsTolerance(1)},
-			want: true,
-		},
-		{
-			name: "f16-nan",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, 6}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 5, math.NaN()}),
-			want: false,
-		},
-		{
-			name: "f16-nan-not",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, 6}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 5, math.NaN()}),
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: false,
-		},
-		{
-			name: "f16-nan-ok",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, math.NaN()}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 5, math.NaN()}),
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: true,
-		},
-		{
-			name: "f16-nan-no-tol",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, math.NaN()}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 6, math.NaN()}),
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: false,
-		},
-		{
-			name: "f16-nan-tol",
-			a1:   f16sFrom([]float64{1, 2, 3, 4, 5, math.NaN()}),
-			a2:   f16sFrom([]float64{1, 2, 3, 4, 6, math.NaN()}),
-			opts: []array.EqualOption{array.WithNaNsEqual(true), array.WithAbsTolerance(1)},
-			want: true,
-		},
-		{
-			name: "f32",
-			a1:   []float32{1, 2, 3, 4, 5, 6},
-			a2:   []float32{1, 2, 3, 4, 5, 6},
-			want: true,
-		},
-		{
-			name: "f32-no-tol",
-			a1:   []float32{1, 2, 3, 4, 5, 6},
-			a2:   []float32{1, 2, 3, 4, 5, 7},
-			want: false,
-		},
-		{
-			name: "f32-tol-ok",
-			a1:   []float32{1, 2, 3, 4, 5, 6},
-			a2:   []float32{1, 2, 3, 4, 5, 7},
-			opts: []array.EqualOption{array.WithAbsTolerance(1)},
-			want: true,
-		},
-		{
-			name: "f32-nan",
-			a1:   []float32{1, 2, 3, 4, 5, 6},
-			a2:   []float32{1, 2, 3, 4, 5, float32(math.NaN())},
-			want: false,
-		},
-		{
-			name: "f32-nan-not",
-			a1:   []float32{1, 2, 3, 4, 5, 6},
-			a2:   []float32{1, 2, 3, 4, 5, float32(math.NaN())},
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: false,
-		},
-		{
-			name: "f32-nan-ok",
-			a1:   []float32{1, 2, 3, 4, 5, float32(math.NaN())},
-			a2:   []float32{1, 2, 3, 4, 5, float32(math.NaN())},
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: true,
-		},
-		{
-			name: "f32-nan-no-tol",
-			a1:   []float32{1, 2, 3, 4, 5, float32(math.NaN())},
-			a2:   []float32{1, 2, 3, 4, 6, float32(math.NaN())},
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: false,
-		},
-		{
-			name: "f32-nan-tol",
-			a1:   []float32{1, 2, 3, 4, 5, float32(math.NaN())},
-			a2:   []float32{1, 2, 3, 4, 6, float32(math.NaN())},
-			opts: []array.EqualOption{array.WithNaNsEqual(true), array.WithAbsTolerance(1)},
-			want: true,
-		},
-		{
-			name: "f64",
-			a1:   []float64{1, 2, 3, 4, 5, 6},
-			a2:   []float64{1, 2, 3, 4, 5, 6},
-			want: true,
-		},
-		{
-			name: "f64-no-tol",
-			a1:   []float64{1, 2, 3, 4, 5, 6},
-			a2:   []float64{1, 2, 3, 4, 5, 7},
-			want: false,
-		},
-		{
-			name: "f64-tol-ok",
-			a1:   []float64{1, 2, 3, 4, 5, 6},
-			a2:   []float64{1, 2, 3, 4, 5, 7},
-			opts: []array.EqualOption{array.WithAbsTolerance(1)},
-			want: true,
-		},
-		{
-			name: "f64-nan",
-			a1:   []float64{1, 2, 3, 4, 5, 6},
-			a2:   []float64{1, 2, 3, 4, 5, math.NaN()},
-			want: false,
-		},
-		{
-			name: "f64-nan-not",
-			a1:   []float64{1, 2, 3, 4, 5, 6},
-			a2:   []float64{1, 2, 3, 4, 5, math.NaN()},
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: false,
-		},
-		{
-			name: "f64-nan-ok",
-			a1:   []float64{1, 2, 3, 4, 5, math.NaN()},
-			a2:   []float64{1, 2, 3, 4, 5, math.NaN()},
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: true,
-		},
-		{
-			name: "f64-nan-no-tol",
-			a1:   []float64{1, 2, 3, 4, 5, math.NaN()},
-			a2:   []float64{1, 2, 3, 4, 6, math.NaN()},
-			opts: []array.EqualOption{array.WithNaNsEqual(true)},
-			want: false,
-		},
-		{
-			name: "f64-nan-tol",
-			a1:   []float64{1, 2, 3, 4, 5, math.NaN()},
-			a2:   []float64{1, 2, 3, 4, 6, math.NaN()},
-			opts: []array.EqualOption{array.WithNaNsEqual(true), array.WithAbsTolerance(1)},
-			want: true,
-		},
-	} {
-		t.Run(tc.name, func(t *testing.T) {
-			mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-			defer mem.AssertSize(t, 0)
-
-			a1 := arrayOf(mem, tc.a1, nil)
-			defer a1.Release()
-			a2 := arrayOf(mem, tc.a2, nil)
-			defer a2.Release()
-
-			if got, want := array.ArrayApproxEqual(a1, a2, tc.opts...), tc.want; got != want {
-				t.Fatalf("invalid comparison: got=%v, want=%v\na1: %v\na2: %v\n", got, want, a1, a2)
-			}
-		})
-	}
-}
-
-func arrayOf(mem memory.Allocator, a interface{}, valids []bool) array.Interface {
-	if mem == nil {
-		mem = memory.NewGoAllocator()
-	}
-
-	switch a := a.(type) {
-	case []float16.Num:
-		bldr := array.NewFloat16Builder(mem)
-		defer bldr.Release()
-
-		bldr.AppendValues(a, valids)
-		return bldr.NewFloat16Array()
-
-	case []float32:
-		bldr := array.NewFloat32Builder(mem)
-		defer bldr.Release()
-
-		bldr.AppendValues(a, valids)
-		return bldr.NewFloat32Array()
-
-	case []float64:
-		bldr := array.NewFloat64Builder(mem)
-		defer bldr.Release()
-
-		bldr.AppendValues(a, valids)
-		return bldr.NewFloat64Array()
-
-	default:
-		panic(fmt.Errorf("arrdata: invalid data slice type %T", a))
-	}
-}
-
-func TestArrayEqualBaseArray(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	b1 := array.NewBooleanBuilder(mem)
-	defer b1.Release()
-	b1.Append(true)
-	a1 := b1.NewBooleanArray()
-	defer a1.Release()
-
-	b2 := array.NewBooleanBuilder(mem)
-	defer b2.Release()
-	a2 := b2.NewBooleanArray()
-	defer a2.Release()
-
-	if array.ArrayEqual(a1, a2) {
-		t.Errorf("two arrays with different lengths must not be equal")
-	}
-
-	b3 := array.NewBooleanBuilder(mem)
-	defer b3.Release()
-	b3.AppendNull()
-	a3 := b3.NewBooleanArray()
-	defer a3.Release()
-
-	if array.ArrayEqual(a1, a3) {
-		t.Errorf("two arrays with different number of null values must not be equal")
-	}
-
-	b4 := array.NewInt32Builder(mem)
-	defer b4.Release()
-	b4.Append(0)
-	a4 := b4.NewInt32Array()
-	defer a4.Release()
-
-	if array.ArrayEqual(a1, a4) {
-		t.Errorf("two arrays with different types must not be equal")
-	}
-
-	b5 := array.NewBooleanBuilder(mem)
-	defer b5.Release()
-	b5.AppendNull()
-	b5.Append(true)
-	a5 := b5.NewBooleanArray()
-	defer a5.Release()
-	b1.AppendNull()
-
-	if array.ArrayEqual(a1, a5) {
-		t.Errorf("two arrays with different validity bitmaps must not be equal")
-	}
-}
-
-func TestArrayEqualNull(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	null := array.NewNull(0)
-	defer null.Release()
-
-	if !array.ArrayEqual(null, null) {
-		t.Fatalf("identical arrays should compare equal")
-	}
-
-	n0 := array.NewNull(10)
-	defer n0.Release()
-
-	n1 := array.NewNull(10)
-	defer n1.Release()
-
-	if !array.ArrayEqual(n0, n0) {
-		t.Fatalf("identical arrays should compare equal")
-	}
-	if !array.ArrayEqual(n1, n1) {
-		t.Fatalf("identical arrays should compare equal")
-	}
-	if !array.ArrayEqual(n0, n1) || !array.ArrayEqual(n1, n0) {
-		t.Fatalf("n0 and n1 should compare equal")
-	}
-
-	sub07 := array.NewSlice(n0, 0, 7)
-	defer sub07.Release()
-	sub08 := array.NewSlice(n0, 0, 8)
-	defer sub08.Release()
-	sub19 := array.NewSlice(n0, 1, 9)
-	defer sub19.Release()
-
-	if !array.ArrayEqual(sub08, sub19) {
-		t.Fatalf("sub08 and sub19 should compare equal")
-	}
-
-	if array.ArrayEqual(sub08, sub07) {
-		t.Fatalf("sub08 and sub07 should not compare equal")
-	}
-}
-
-func TestArrayEqualMaskedArray(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewInt32Builder(mem)
-	defer ab.Release()
-
-	valids := []bool{false, false, false, false}
-	ab.AppendValues([]int32{1, 2, 0, 4}, valids)
-
-	a1 := ab.NewInt32Array()
-	defer a1.Release()
-
-	ab.AppendValues([]int32{1, 2, 3, 4}, valids)
-	a2 := ab.NewInt32Array()
-	defer a2.Release()
-
-	if !array.ArrayEqual(a1, a1) || !array.ArrayEqual(a2, a2) {
-		t.Errorf("an array must be equal to itself")
-	}
-
-	if !array.ArrayEqual(a1, a2) {
-		t.Errorf("%v must be equal to %v", a1, a2)
-	}
-}
-
-func TestArrayEqualDifferentMaskedValues(t *testing.T) {
-	// test that two int32 arrays with the same nulls (but different masked values) compare equal.
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewInt32Builder(mem)
-	defer ab.Release()
-
-	valids := []bool{true, true, false, true}
-	ab.AppendValues([]int32{1, 2, 0, 4}, valids)
-
-	a1 := ab.NewInt32Array()
-	defer a1.Release()
-
-	ab.AppendValues([]int32{1, 2, 3, 4}, valids)
-	a2 := ab.NewInt32Array()
-	defer a2.Release()
-
-	if !array.ArrayEqual(a1, a1) || !array.ArrayEqual(a2, a2) {
-		t.Errorf("an array must be equal to itself")
-	}
-
-	if !array.ArrayEqual(a1, a2) {
-		t.Errorf("%v must be equal to %v", a1, a2)
-	}
-}
-
-func TestRecordEqual(t *testing.T) {
-	for name, recs := range arrdata.Records {
-		t.Run(name, func(t *testing.T) {
-			rec0 := recs[0]
-			rec1 := recs[1]
-			if !array.RecordEqual(rec0, rec0) {
-				t.Fatalf("identical records should compare equal:\nrecord:\n%v", rec0)
-			}
-
-			if array.RecordEqual(rec0, rec1) && name != "nulls" {
-				t.Fatalf("non-identical records should not compare equal:\nrec0:\n%v\nrec1:\n%v", rec0, rec1)
-			}
-
-			sub00 := rec0.NewSlice(0, recs[0].NumRows()-1)
-			defer sub00.Release()
-			sub01 := rec0.NewSlice(1, recs[0].NumRows())
-			defer sub01.Release()
-
-			if array.RecordEqual(sub00, sub01) && name != "nulls" {
-				t.Fatalf("non-identical records should not compare equal:\nsub0:\n%v\nsub1:\n%v", sub00, sub01)
-			}
-		})
-	}
-}
-
-func TestRecordApproxEqual(t *testing.T) {
-	for name, recs := range arrdata.Records {
-		t.Run(name, func(t *testing.T) {
-			rec0 := recs[0]
-			rec1 := recs[1]
-			if !array.RecordApproxEqual(rec0, rec0) {
-				t.Fatalf("identical records should compare equal:\nrecord:\n%v", rec0)
-			}
-
-			if array.RecordApproxEqual(rec0, rec1) && name != "nulls" {
-				t.Fatalf("non-identical records should not compare equal:\nrec0:\n%v\nrec1:\n%v", rec0, rec1)
-			}
-
-			sub00 := rec0.NewSlice(0, recs[0].NumRows()-1)
-			defer sub00.Release()
-			sub01 := rec0.NewSlice(1, recs[0].NumRows())
-			defer sub01.Release()
-
-			if array.RecordApproxEqual(sub00, sub01) && name != "nulls" {
-				t.Fatalf("non-identical records should not compare equal:\nsub0:\n%v\nsub1:\n%v", sub00, sub01)
-			}
-		})
-	}
-}
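
RecordEqual above delegates column by column to ArrayEqual after checking shape. A minimal end-to-end sketch; array.NewRecord is assumed from the record.go that was removed alongside these files:

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow"
        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        mem := memory.NewGoAllocator()
        schema := arrow.NewSchema(
            []arrow.Field{{Name: "x", Type: arrow.PrimitiveTypes.Int32}},
            nil,
        )

        b := array.NewInt32Builder(mem)
        defer b.Release()
        b.AppendValues([]int32{1, 2, 3}, nil)
        col := b.NewInt32Array()
        defer col.Release()

        rec := array.NewRecord(schema, []array.Interface{col}, 3)
        defer rec.Release()

        fmt.Println(array.RecordEqual(rec, rec)) // true: same columns, same rows
    }
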
diff --git a/go/arrow/array/data.go b/go/arrow/array/data.go
deleted file mode 100644
index 2648961..0000000
--- a/go/arrow/array/data.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// Data represents the memory and metadata of an Arrow array.
-type Data struct {
-	refCount  int64
-	dtype     arrow.DataType
-	nulls     int
-	offset    int
-	length    int
-	buffers   []*memory.Buffer // TODO(sgc): should this be an interface?
-	childData []*Data          // TODO(sgc): managed by ListArray, StructArray and UnionArray types
-}
-
-// NewData creates a new Data.
-func NewData(dtype arrow.DataType, length int, buffers []*memory.Buffer, childData []*Data, nulls, offset int) *Data {
-	for _, b := range buffers {
-		if b != nil {
-			b.Retain()
-		}
-	}
-
-	for _, child := range childData {
-		if child != nil {
-			child.Retain()
-		}
-	}
-
-	return &Data{
-		refCount:  1,
-		dtype:     dtype,
-		nulls:     nulls,
-		length:    length,
-		offset:    offset,
-		buffers:   buffers,
-		childData: childData,
-	}
-}
-
-// Reset sets the Data for re-use.
-func (d *Data) Reset(dtype arrow.DataType, length int, buffers []*memory.Buffer, childData []*Data, nulls, offset int) {
-	// Retain new buffers before releasing existing buffers, in case they're the same ones, to prevent an accidental
-	// premature release.
-	for _, b := range buffers {
-		if b != nil {
-			b.Retain()
-		}
-	}
-	for _, b := range d.buffers {
-		if b != nil {
-			b.Release()
-		}
-	}
-	d.buffers = buffers
-
-	// Retain new child data before releasing existing child data, in case they're the same, to prevent an accidental
-	// premature release.
-	for _, d := range childData {
-		if d != nil {
-			d.Retain()
-		}
-	}
-	for _, d := range d.childData {
-		if d != nil {
-			d.Release()
-		}
-	}
-	d.childData = childData
-
-	d.dtype = dtype
-	d.length = length
-	d.nulls = nulls
-	d.offset = offset
-}
-
-// Retain increases the reference count by 1.
-// Retain may be called simultaneously from multiple goroutines.
-func (d *Data) Retain() {
-	atomic.AddInt64(&d.refCount, 1)
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-// Release may be called simultaneously from multiple goroutines.
-func (d *Data) Release() {
-	debug.Assert(atomic.LoadInt64(&d.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&d.refCount, -1) == 0 {
-		for _, b := range d.buffers {
-			if b != nil {
-				b.Release()
-			}
-		}
-
-		for _, b := range d.childData {
-			b.Release()
-		}
-		d.buffers, d.childData = nil, nil
-	}
-}
-
-// DataType returns the DataType of the data.
-func (d *Data) DataType() arrow.DataType { return d.dtype }
-
-// NullN returns the number of nulls.
-func (d *Data) NullN() int { return d.nulls }
-
-// Len returns the length.
-func (d *Data) Len() int { return d.length }
-
-// Offset returns the offset.
-func (d *Data) Offset() int { return d.offset }
-
-// Buffers returns the buffers.
-func (d *Data) Buffers() []*memory.Buffer { return d.buffers }
-
-// NewSliceData returns a new slice that shares backing data with the input.
-// The returned Data slice starts at i and spans j-i elements, analogous to:
-//    slice := data[i:j]
-// The returned value must be Release'd after use.
-//
-// NewSliceData panics if the slice is outside the valid range of the input Data.
-// NewSliceData panics if j < i.
-func NewSliceData(data *Data, i, j int64) *Data {
-	if j > int64(data.length) || i > j || data.offset+int(i) > data.offset+data.length {
-		panic("arrow/array: index out of range")
-	}
-
-	for _, b := range data.buffers {
-		if b != nil {
-			b.Retain()
-		}
-	}
-
-	for _, child := range data.childData {
-		if child != nil {
-			child.Retain()
-		}
-	}
-
-	o := &Data{
-		refCount:  1,
-		dtype:     data.dtype,
-		nulls:     UnknownNullCount,
-		length:    int(j - i),
-		offset:    data.offset + int(i),
-		buffers:   data.buffers,
-		childData: data.childData,
-	}
-
-	if data.nulls == 0 {
-		o.nulls = 0
-	}
-
-	return o
-}
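
Note that NewSliceData shares buffers rather than copying, so the slice and the source carry independent reference counts. A short lifecycle sketch, assuming the pre-removal import paths:

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        mem := memory.NewGoAllocator()

        b := array.NewInt32Builder(mem)
        defer b.Release()
        b.AppendValues([]int32{1, 2, 3, 4}, nil)

        arr := b.NewInt32Array()
        defer arr.Release()

        // NewSliceData retains the shared buffers, so the slice and the
        // original array must each be released independently.
        sliced := array.NewSliceData(arr.Data(), 1, 3)
        defer sliced.Release()

        fmt.Println(sliced.Offset(), sliced.Len()) // 1 2
    }
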
diff --git a/go/arrow/array/data_test.go b/go/arrow/array/data_test.go
deleted file mode 100644
index de87b80..0000000
--- a/go/arrow/array/data_test.go
+++ /dev/null
@@ -1,51 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestDataReset(t *testing.T) {
-	var (
-		buffers1 = make([]*memory.Buffer, 0, 3)
-		buffers2 = make([]*memory.Buffer, 0, 3)
-	)
-	for i := 0; i < cap(buffers1); i++ {
-		buffers1 = append(buffers1, memory.NewBufferBytes([]byte("some-bytes1")))
-		buffers2 = append(buffers2, memory.NewBufferBytes([]byte("some-bytes2")))
-	}
-
-	data := NewData(&arrow.StringType{}, 10, buffers1, nil, 0, 0)
-	data.Reset(&arrow.Int64Type{}, 5, buffers2, nil, 1, 2)
-
-	for i := 0; i < 2; i++ {
-		assert.Equal(t, buffers2, data.Buffers())
-		assert.Equal(t, &arrow.Int64Type{}, data.DataType())
-		assert.Equal(t, 1, data.NullN())
-		assert.Equal(t, 2, data.Offset())
-		assert.Equal(t, 5, data.Len())
-
-		// Make sure it works when resetting the data with its own buffers (new buffers are retained
-		// before old ones are released.)
-		data.Reset(&arrow.Int64Type{}, 5, data.Buffers(), nil, 1, 2)
-	}
-}
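
The retain-before-release ordering in Data.Reset is what the loop above verifies: resetting a Data with its own buffers must not drop them to zero references mid-swap. The idiom restated generically, with refCounted as a hypothetical stand-in for memory.Buffer:

    package main

    import "fmt"

    // refCounted is a hypothetical stand-in for memory.Buffer / array.Data.
    type refCounted struct{ refs int }

    func (r *refCounted) Retain()  { r.refs++ }
    func (r *refCounted) Release() { r.refs-- }

    // swap retains the incoming resources before releasing the old ones,
    // so a resource shared between both sets never hits zero references.
    func swap(dst *[]*refCounted, src []*refCounted) {
        for _, r := range src {
            r.Retain()
        }
        for _, r := range *dst {
            r.Release()
        }
        *dst = src
    }

    func main() {
        shared := &refCounted{refs: 1}
        held := []*refCounted{shared}
        swap(&held, []*refCounted{shared}) // resetting with its own buffer is safe
        fmt.Println(shared.refs)           // still 1
    }
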
diff --git a/go/arrow/array/decimal128.go b/go/arrow/array/decimal128.go
deleted file mode 100644
index 3acf6b9..0000000
--- a/go/arrow/array/decimal128.go
+++ /dev/null
@@ -1,235 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array // import "github.com/apache/arrow/go/arrow/array"
-
-import (
-	"fmt"
-	"strings"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/decimal128"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// Decimal128 represents an immutable sequence of 128-bit decimal values.
-type Decimal128 struct {
-	array
-
-	values []decimal128.Num
-}
-
-func NewDecimal128Data(data *Data) *Decimal128 {
-	a := &Decimal128{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *Decimal128) Value(i int) decimal128.Num { return a.values[i] }
-
-func (a *Decimal128) Values() []decimal128.Num { return a.values }
-
-func (a *Decimal128) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			fmt.Fprintf(o, " ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%v", a.Value(i))
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *Decimal128) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.values = arrow.Decimal128Traits.CastFromBytes(vals.Bytes())
-		beg := a.array.data.offset
-		end := beg + a.array.data.length
-		a.values = a.values[beg:end]
-	}
-}
-
-func arrayEqualDecimal128(left, right *Decimal128) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if left.Value(i) != right.Value(i) {
-			return false
-		}
-	}
-	return true
-}
-
-type Decimal128Builder struct {
-	builder
-
-	dtype   *arrow.Decimal128Type
-	data    *memory.Buffer
-	rawData []decimal128.Num
-}
-
-func NewDecimal128Builder(mem memory.Allocator, dtype *arrow.Decimal128Type) *Decimal128Builder {
-	return &Decimal128Builder{
-		builder: builder{refCount: 1, mem: mem},
-		dtype:   dtype,
-	}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-func (b *Decimal128Builder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.data != nil {
-			b.data.Release()
-			b.data = nil
-			b.rawData = nil
-		}
-	}
-}
-
-func (b *Decimal128Builder) Append(v decimal128.Num) {
-	b.Reserve(1)
-	b.UnsafeAppend(v)
-}
-
-func (b *Decimal128Builder) UnsafeAppend(v decimal128.Num) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	b.rawData[b.length] = v
-	b.length++
-}
-
-func (b *Decimal128Builder) AppendNull() {
-	b.Reserve(1)
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-func (b *Decimal128Builder) UnsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-// AppendValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *Decimal128Builder) AppendValues(v []decimal128.Num, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	if len(v) > 0 {
-		arrow.Decimal128Traits.Copy(b.rawData[b.length:], v)
-	}
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *Decimal128Builder) init(capacity int) {
-	b.builder.init(capacity)
-
-	b.data = memory.NewResizableBuffer(b.mem)
-	bytesN := arrow.Decimal128Traits.BytesRequired(capacity)
-	b.data.Resize(bytesN)
-	b.rawData = arrow.Decimal128Traits.CastFromBytes(b.data.Bytes())
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *Decimal128Builder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *Decimal128Builder) Resize(n int) {
-	nBuilder := n
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
-	if b.capacity == 0 {
-		b.init(n)
-	} else {
-		b.builder.resize(nBuilder, b.init)
-		b.data.Resize(arrow.Decimal128Traits.BytesRequired(n))
-		b.rawData = arrow.Decimal128Traits.CastFromBytes(b.data.Bytes())
-	}
-}
-
-// NewArray creates a Decimal128 array from the memory buffers used by the builder and resets the Decimal128Builder
-// so it can be used to build a new array.
-func (b *Decimal128Builder) NewArray() Interface {
-	return b.NewDecimal128Array()
-}
-
-// NewDecimal128Array creates a Decimal128 array from the memory buffers used by the builder and resets the Decimal128Builder
-// so it can be used to build a new array.
-func (b *Decimal128Builder) NewDecimal128Array() (a *Decimal128) {
-	data := b.newData()
-	a = NewDecimal128Data(data)
-	data.Release()
-	return
-}
-
-func (b *Decimal128Builder) newData() (data *Data) {
-	bytesRequired := arrow.Decimal128Traits.BytesRequired(b.length)
-	if bytesRequired > 0 && bytesRequired < b.data.Len() {
-		// trim buffers
-		b.data.Resize(bytesRequired)
-	}
-	data = NewData(b.dtype, b.length, []*memory.Buffer{b.nullBitmap, b.data}, nil, b.nulls, 0)
-	b.reset()
-
-	if b.data != nil {
-		b.data.Release()
-		b.data = nil
-		b.rawData = nil
-	}
-
-	return
-}
-
-var (
-	_ Interface = (*Decimal128)(nil)
-	_ Builder   = (*Decimal128Builder)(nil)
-)
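
A compact usage sketch of the builder above (the test file that follows exercises the same API in more depth), assuming the pre-removal import paths:

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow"
        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/decimal128"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        mem := memory.NewGoAllocator()

        b := array.NewDecimal128Builder(mem, &arrow.Decimal128Type{Precision: 10, Scale: 1})
        defer b.Release()

        // The valid mask must be empty or match len(v); false marks a null.
        b.AppendValues(
            []decimal128.Num{decimal128.FromI64(1), decimal128.FromI64(2)},
            []bool{true, false},
        )

        arr := b.NewDecimal128Array()
        defer arr.Release()

        fmt.Println(arr.Len(), arr.NullN()) // 2 1
    }
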
diff --git a/go/arrow/array/decimal128_test.go b/go/arrow/array/decimal128_test.go
deleted file mode 100644
index 5a39d92..0000000
--- a/go/arrow/array/decimal128_test.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/decimal128"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestNewDecimal128Builder(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewDecimal128Builder(mem, &arrow.Decimal128Type{Precision: 10, Scale: 1})
-	defer ab.Release()
-
-	ab.Retain()
-	ab.Release()
-
-	want := []decimal128.Num{
-		decimal128.New(1, 1),
-		decimal128.New(2, 2),
-		decimal128.New(3, 3),
-		{},
-		decimal128.FromI64(-5),
-		decimal128.FromI64(-6),
-		{},
-		decimal128.FromI64(8),
-		decimal128.FromI64(9),
-		decimal128.FromI64(10),
-	}
-	valids := []bool{true, true, true, false, true, true, false, true, true, true}
-
-	for i, valid := range valids {
-		switch {
-		case valid:
-			ab.Append(want[i])
-		default:
-			ab.AppendNull()
-		}
-	}
-
-	// check state of builder before NewDecimal128Array
-	assert.Equal(t, 10, ab.Len(), "unexpected Len()")
-	assert.Equal(t, 2, ab.NullN(), "unexpected NullN()")
-
-	a := ab.NewArray().(*array.Decimal128)
-	a.Retain()
-	a.Release()
-
-	// check state of builder after NewDecimal128Array
-	assert.Zero(t, ab.Len(), "unexpected ArrayBuilder.Len(), NewDecimal128Array did not reset state")
-	assert.Zero(t, ab.Cap(), "unexpected ArrayBuilder.Cap(), NewDecimal128Array did not reset state")
-	assert.Zero(t, ab.NullN(), "unexpected ArrayBuilder.NullN(), NewDecimal128Array did not reset state")
-
-	// check state of array
-	assert.Equal(t, 2, a.NullN(), "unexpected null count")
-
-	assert.Equal(t, want, a.Values(), "unexpected Decimal128Values")
-	assert.Equal(t, []byte{0xb7}, a.NullBitmapBytes()[:1]) // bitmap is padded to 4 bytes by minBuilderCapacity, so compare only the first byte
-	assert.Len(t, a.Values(), 10, "unexpected length of Decimal128Values")
-
-	a.Release()
-	ab.Append(decimal128.FromI64(7))
-	ab.Append(decimal128.FromI64(8))
-
-	a = ab.NewDecimal128Array()
-
-	assert.Equal(t, 0, a.NullN())
-	assert.Equal(t, []decimal128.Num{decimal128.FromI64(7), decimal128.FromI64(8)}, a.Values())
-	assert.Len(t, a.Values(), 2)
-
-	a.Release()
-}
-
-func TestDecimal128Builder_Empty(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewDecimal128Builder(mem, &arrow.Decimal128Type{Precision: 10, Scale: 1})
-	defer ab.Release()
-
-	want := []decimal128.Num{decimal128.FromI64(3), decimal128.FromI64(4)}
-
-	ab.AppendValues([]decimal128.Num{}, nil)
-	a := ab.NewDecimal128Array()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(nil, nil)
-	a = ab.NewDecimal128Array()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	a = ab.NewDecimal128Array()
-	assert.Equal(t, want, a.Values())
-	a.Release()
-
-	ab.AppendValues([]decimal128.Num{}, nil)
-	ab.AppendValues(want, nil)
-	a = ab.NewDecimal128Array()
-	assert.Equal(t, want, a.Values())
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	ab.AppendValues([]decimal128.Num{}, nil)
-	a = ab.NewDecimal128Array()
-	assert.Equal(t, want, a.Values())
-	a.Release()
-}
-
-func TestDecimal128Slice(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	dtype := &arrow.Decimal128Type{Precision: 10, Scale: 1}
-	b := array.NewDecimal128Builder(mem, dtype)
-	defer b.Release()
-
-	var data = []decimal128.Num{
-		decimal128.FromI64(-1),
-		decimal128.FromI64(+0),
-		decimal128.FromI64(+1),
-		decimal128.New(-4, 4),
-	}
-	b.AppendValues(data[:2], nil)
-	b.AppendNull()
-	b.Append(data[3])
-
-	arr := b.NewDecimal128Array()
-	defer arr.Release()
-
-	if got, want := arr.Len(), len(data); got != want {
-		t.Fatalf("invalid array length: got=%d, want=%d", got, want)
-	}
-
-	slice := array.NewSliceData(arr.Data(), 2, 4)
-	defer slice.Release()
-
-	sub1 := array.MakeFromData(slice)
-	defer sub1.Release()
-
-	v, ok := sub1.(*array.Decimal128)
-	if !ok {
-		t.Fatalf("could not type-assert to array.String")
-	}
-
-	if got, want := v.String(), `[(null) {4 -4}]`; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-
-	if got, want := v.NullN(), 1; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-
-	if got, want := v.Data().Offset(), 2; got != want {
-		t.Fatalf("invalid offset: got=%d, want=%d", got, want)
-	}
-}
diff --git a/go/arrow/array/doc.go b/go/arrow/array/doc.go
deleted file mode 100644
index 5cf8540..0000000
--- a/go/arrow/array/doc.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/*
-Package array provides implementations of various Arrow array types.
-*/
-package array
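
A minimal end-to-end example of the package being removed (build, finalize, read), assuming the pre-removal import paths:

    package main

    import (
        "fmt"

        "github.com/apache/arrow/go/arrow/array"
        "github.com/apache/arrow/go/arrow/memory"
    )

    func main() {
        mem := memory.NewGoAllocator()

        b := array.NewStringBuilder(mem)
        defer b.Release()

        b.Append("hello")
        b.AppendNull()
        b.Append("world")

        arr := b.NewStringArray()
        defer arr.Release()

        for i := 0; i < arr.Len(); i++ {
            if arr.IsNull(i) {
                fmt.Println("(null)")
                continue
            }
            fmt.Println(arr.Value(i))
        }
    }
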
diff --git a/go/arrow/array/fixed_size_list.go b/go/arrow/array/fixed_size_list.go
deleted file mode 100644
index 5cfaa5c..0000000
--- a/go/arrow/array/fixed_size_list.go
+++ /dev/null
@@ -1,240 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"fmt"
-	"strings"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// FixedSizeList represents an immutable sequence of N array values.
-type FixedSizeList struct {
-	array
-	n      int32
-	values Interface
-}
-
-// NewFixedSizeListData returns a new List array value, from data.
-func NewFixedSizeListData(data *Data) *FixedSizeList {
-	a := &FixedSizeList{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *FixedSizeList) ListValues() Interface { return a.values }
-
-func (a *FixedSizeList) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			o.WriteString(" ")
-		}
-		if !a.IsValid(i) {
-			o.WriteString("(null)")
-			continue
-		}
-		sub := a.newListValue(i)
-		fmt.Fprintf(o, "%v", sub)
-		sub.Release()
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *FixedSizeList) newListValue(i int) Interface {
-	n := int64(a.n)
-	off := int64(a.array.data.offset)
-	beg := (off + int64(i)) * n
-	end := (off + int64(i+1)) * n
-	sli := NewSlice(a.values, beg, end)
-	return sli
-}
-
-func (a *FixedSizeList) setData(data *Data) {
-	a.array.setData(data)
-	a.n = a.DataType().(*arrow.FixedSizeListType).Len()
-	a.values = MakeFromData(data.childData[0])
-}
-
-func arrayEqualFixedSizeList(left, right *FixedSizeList) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		o := func() bool {
-			l := left.newListValue(i)
-			defer l.Release()
-			r := right.newListValue(i)
-			defer r.Release()
-			return ArrayEqual(l, r)
-		}()
-		if !o {
-			return false
-		}
-	}
-	return true
-}
-
-// Len returns the number of elements in the array.
-func (a *FixedSizeList) Len() int { return a.array.Len() }
-
-func (a *FixedSizeList) Retain() {
-	a.array.Retain()
-	a.values.Retain()
-}
-
-func (a *FixedSizeList) Release() {
-	a.array.Release()
-	a.values.Release()
-}
-
-type FixedSizeListBuilder struct {
-	builder
-
-	etype  arrow.DataType // data type of the list's elements.
-	n      int32          // number of elements in the fixed-size list.
-	values Builder        // value builder for the list's elements.
-}
-
-// NewFixedSizeListBuilder returns a builder, using the provided memory allocator.
-// The returned builder creates fixed-size list arrays whose elements are of type etype.
-func NewFixedSizeListBuilder(mem memory.Allocator, n int32, etype arrow.DataType) *FixedSizeListBuilder {
-	return &FixedSizeListBuilder{
-		builder: builder{refCount: 1, mem: mem},
-		etype:   etype,
-		n:       n,
-		values:  NewBuilder(mem, etype),
-	}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-func (b *FixedSizeListBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.values != nil {
-			b.values.Release()
-			b.values = nil
-		}
-	}
-}
-
-func (b *FixedSizeListBuilder) Append(v bool) {
-	b.Reserve(1)
-	b.unsafeAppendBoolToBitmap(v)
-}
-
-func (b *FixedSizeListBuilder) AppendNull() {
-	b.Reserve(1)
-	b.unsafeAppendBoolToBitmap(false)
-}
-
-func (b *FixedSizeListBuilder) AppendValues(valid []bool) {
-	b.Reserve(len(valid))
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(valid))
-}
-
-func (b *FixedSizeListBuilder) unsafeAppend(v bool) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	b.length++
-}
-
-func (b *FixedSizeListBuilder) unsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-func (b *FixedSizeListBuilder) init(capacity int) {
-	b.builder.init(capacity)
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *FixedSizeListBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *FixedSizeListBuilder) Resize(n int) {
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
-	if b.capacity == 0 {
-		b.init(n)
-	} else {
-		b.builder.resize(n, b.builder.init)
-	}
-}
-
-func (b *FixedSizeListBuilder) ValueBuilder() Builder {
-	return b.values
-}
-
-// NewArray creates a List array from the memory buffers used by the builder and resets the FixedSizeListBuilder
-// so it can be used to build a new array.
-func (b *FixedSizeListBuilder) NewArray() Interface {
-	return b.NewListArray()
-}
-
-// NewListArray creates a List array from the memory buffers used by the builder and resets the FixedSizeListBuilder
-// so it can be used to build a new array.
-func (b *FixedSizeListBuilder) NewListArray() (a *FixedSizeList) {
-	data := b.newData()
-	a = NewFixedSizeListData(data)
-	data.Release()
-	return
-}
-
-func (b *FixedSizeListBuilder) newData() (data *Data) {
-	values := b.values.NewArray()
-	defer values.Release()
-
-	data = NewData(
-		arrow.FixedSizeListOf(b.n, b.etype), b.length,
-		[]*memory.Buffer{b.nullBitmap},
-		[]*Data{values.Data()},
-		b.nulls,
-		0,
-	)
-	b.reset()
-
-	return
-}
-
-var (
-	_ Interface = (*FixedSizeList)(nil)
-	_ Builder   = (*FixedSizeListBuilder)(nil)
-)
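
newListValue above maps list element i to a contiguous range of the child values array. The arithmetic restated as a hypothetical helper, with one worked case:

    package main

    import "fmt"

    // fixedSizeListRange restates newListValue's arithmetic: element i of
    // a fixed-size-n list at array offset off maps to the child-value
    // range [(off+i)*n, (off+i+1)*n).
    func fixedSizeListRange(off, i, n int64) (beg, end int64) {
        return (off + i) * n, (off + i + 1) * n
    }

    func main() {
        beg, end := fixedSizeListRange(0, 1, 3)
        fmt.Println(beg, end) // 3 6: the second list element spans values[3:6]
    }
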
diff --git a/go/arrow/array/fixed_size_list_test.go b/go/arrow/array/fixed_size_list_test.go
deleted file mode 100644
index d72bc0b..0000000
--- a/go/arrow/array/fixed_size_list_test.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"reflect"
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func TestFixedSizeListArray(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	var (
-		vs      = []int32{0, 1, 2, 3, 4, 5, 6}
-		lengths = []int{3, 0, 4}
-		isValid = []bool{true, false, true}
-	)
-
-	lb := array.NewFixedSizeListBuilder(pool, int32(len(vs)), arrow.PrimitiveTypes.Int32)
-	defer lb.Release()
-
-	for i := 0; i < 10; i++ {
-		vb := lb.ValueBuilder().(*array.Int32Builder)
-		vb.Reserve(len(vs))
-
-		pos := 0
-		for i, length := range lengths {
-			lb.Append(isValid[i])
-			for j := 0; j < length; j++ {
-				vb.Append(vs[pos])
-				pos++
-			}
-		}
-
-		arr := lb.NewArray().(*array.FixedSizeList)
-		defer arr.Release()
-
-		arr.Retain()
-		arr.Release()
-
-		if got, want := arr.DataType().ID(), arrow.FIXED_SIZE_LIST; got != want {
-			t.Fatalf("got=%v, want=%v", got, want)
-		}
-
-		if got, want := arr.Len(), len(isValid); got != want {
-			t.Fatalf("got=%d, want=%d", got, want)
-		}
-
-		for i := range lengths {
-			if got, want := arr.IsValid(i), isValid[i]; got != want {
-				t.Fatalf("got[%d]=%v, want[%d]=%v", i, got, i, want)
-			}
-			if got, want := arr.IsNull(i), lengths[i] == 0; got != want {
-				t.Fatalf("got[%d]=%v, want[%d]=%v", i, got, i, want)
-			}
-		}
-
-		varr := arr.ListValues().(*array.Int32)
-		if got, want := varr.Int32Values(), vs; !reflect.DeepEqual(got, want) {
-			t.Fatalf("got=%v, want=%v", got, want)
-		}
-	}
-}
-
-func TestFixedSizeListArrayEmpty(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	lb := array.NewFixedSizeListBuilder(pool, 3, arrow.PrimitiveTypes.Int32)
-	defer lb.Release()
-	arr := lb.NewArray().(*array.FixedSizeList)
-	defer arr.Release()
-	if got, want := arr.Len(), 0; got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-}
-
-func TestFixedSizeListArrayBulkAppend(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	var (
-		vs      = []int32{0, 1, 2, 3, 4, 5, 6}
-		lengths = []int{3, 0, 4}
-		isValid = []bool{true, false, true}
-	)
-
-	lb := array.NewFixedSizeListBuilder(pool, int32(len(vs)), arrow.PrimitiveTypes.Int32)
-	defer lb.Release()
-	vb := lb.ValueBuilder().(*array.Int32Builder)
-	vb.Reserve(len(vs))
-
-	lb.AppendValues(isValid)
-	for _, v := range vs {
-		vb.Append(v)
-	}
-
-	arr := lb.NewArray().(*array.FixedSizeList)
-	defer arr.Release()
-
-	if got, want := arr.DataType().ID(), arrow.FIXED_SIZE_LIST; got != want {
-		t.Fatalf("got=%v, want=%v", got, want)
-	}
-
-	if got, want := arr.Len(), len(isValid); got != want {
-		t.Fatalf("got=%d, want=%d", got, want)
-	}
-
-	for i := range lengths {
-		if got, want := arr.IsValid(i), isValid[i]; got != want {
-			t.Fatalf("got[%d]=%v, want[%d]=%v", i, got, i, want)
-		}
-		if got, want := arr.IsNull(i), lengths[i] == 0; got != want {
-			t.Fatalf("got[%d]=%v, want[%d]=%v", i, got, i, want)
-		}
-	}
-
-	varr := arr.ListValues().(*array.Int32)
-	if got, want := varr.Int32Values(), vs; !reflect.DeepEqual(got, want) {
-		t.Fatalf("got=%v, want=%v", got, want)
-	}
-}
-
-func TestFixedSizeListArrayStringer(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	const N = 3
-	var (
-		vs      = [][N]int32{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}, {9, -9, -8}}
-		isValid = []bool{true, false, true, true}
-	)
-
-	lb := array.NewFixedSizeListBuilder(pool, N, arrow.PrimitiveTypes.Int32)
-	defer lb.Release()
-
-	vb := lb.ValueBuilder().(*array.Int32Builder)
-	vb.Reserve(len(vs))
-
-	for i, v := range vs {
-		lb.Append(isValid[i])
-		vb.AppendValues(v[:], nil)
-	}
-
-	arr := lb.NewArray().(*array.FixedSizeList)
-	defer arr.Release()
-
-	arr.Retain()
-	arr.Release()
-
-	want := `[[0 1 2] (null) [6 7 8] [9 -9 -8]]`
-	if got, want := arr.String(), want; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-}
-
-func TestFixedSizeListArraySlice(t *testing.T) {
-	pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer pool.AssertSize(t, 0)
-
-	const N = 3
-	var (
-		vs      = [][N]int32{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}, {9, -9, -8}}
-		isValid = []bool{true, false, true, true}
-	)
-
-	lb := array.NewFixedSizeListBuilder(pool, N, arrow.PrimitiveTypes.Int32)
-	defer lb.Release()
-
-	vb := lb.ValueBuilder().(*array.Int32Builder)
-	vb.Reserve(len(vs))
-
-	for i, v := range vs {
-		lb.Append(isValid[i])
-		vb.AppendValues(v[:], nil)
-	}
-
-	arr := lb.NewArray().(*array.FixedSizeList)
-	defer arr.Release()
-
-	arr.Retain()
-	arr.Release()
-
-	want := `[[0 1 2] (null) [6 7 8] [9 -9 -8]]`
-	if got, want := arr.String(), want; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-
-	sub := array.NewSlice(arr, 1, 3).(*array.FixedSizeList)
-	defer sub.Release()
-
-	want = `[(null) [6 7 8]]`
-	if got, want := sub.String(), want; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-}
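
For reference while reviewing this removal, here is a minimal, self-contained sketch of the FixedSizeList builder API the tests above exercised, mirroring the append pattern of TestFixedSizeListArrayStringer. Import paths are the pre-removal github.com/apache/arrow/go/arrow module; the snippet is illustrative and not part of the diff.

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	pool := memory.NewGoAllocator()

	// Every list holds exactly 3 int32 values.
	lb := array.NewFixedSizeListBuilder(pool, 3, arrow.PrimitiveTypes.Int32)
	defer lb.Release()

	vb := lb.ValueBuilder().(*array.Int32Builder)

	// One validity flag per list, then the list's 3 child values,
	// following the same pattern as the Stringer test above.
	for i, row := range [][]int32{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}} {
		lb.Append(i != 1) // make the middle list null
		vb.AppendValues(row, nil)
	}

	arr := lb.NewArray().(*array.FixedSizeList)
	defer arr.Release()

	fmt.Println(arr) // [[0 1 2] (null) [6 7 8]]
}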
diff --git a/go/arrow/array/fixedsize_binary.go b/go/arrow/array/fixedsize_binary.go
deleted file mode 100644
index 502fb99..0000000
--- a/go/arrow/array/fixedsize_binary.go
+++ /dev/null
@@ -1,95 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"bytes"
-	"fmt"
-	"strings"
-
-	"github.com/apache/arrow/go/arrow"
-)
-
-// FixedSizeBinary represents an immutable sequence of fixed-length binary strings.
-type FixedSizeBinary struct {
-	array
-
-	valueBytes []byte
-	bytewidth  int32
-}
-
-// NewFixedSizeBinaryData constructs a new fixed-size binary array from data.
-func NewFixedSizeBinaryData(data *Data) *FixedSizeBinary {
-	a := &FixedSizeBinary{bytewidth: int32(data.DataType().(arrow.FixedWidthDataType).BitWidth() / 8)}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-// Value returns the fixed-size slice at index i. This value should not be mutated.
-func (a *FixedSizeBinary) Value(i int) []byte {
-	i += a.array.data.offset
-	var (
-		bw  = int(a.bytewidth)
-		beg = i * bw
-		end = (i + 1) * bw
-	)
-	return a.valueBytes[beg:end]
-}
-
-func (a *FixedSizeBinary) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			o.WriteString(" ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%q", a.Value(i))
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *FixedSizeBinary) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.valueBytes = vals.Bytes()
-	}
-}
-
-func arrayEqualFixedSizeBinary(left, right *FixedSizeBinary) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if !bytes.Equal(left.Value(i), right.Value(i)) {
-			return false
-		}
-	}
-	return true
-}
-
-var (
-	_ Interface = (*FixedSizeBinary)(nil)
-)
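
Value above needs no per-element offsets buffer: element i is a fixed-width window into one shared byte buffer, shifted by the array's slice offset. A standalone restatement of that arithmetic (helper name hypothetical, not part of the removed API):

// fixedSizeValue mirrors FixedSizeBinary.Value: element i of a sliced
// fixed-size binary array occupies bytes [j*w, (j+1)*w) of the shared
// buffer, where j = i + offset and w is the byte width.
func fixedSizeValue(valueBytes []byte, byteWidth, offset, i int) []byte {
	j := i + offset
	return valueBytes[j*byteWidth : (j+1)*byteWidth]
}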
diff --git a/go/arrow/array/fixedsize_binary_test.go b/go/arrow/array/fixedsize_binary_test.go
deleted file mode 100644
index 4d2d724..0000000
--- a/go/arrow/array/fixedsize_binary_test.go
+++ /dev/null
@@ -1,111 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-func TestFixedSizeBinary(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	dtype := arrow.FixedSizeBinaryType{ByteWidth: 7}
-	b := array.NewFixedSizeBinaryBuilder(mem, &dtype)
-
-	zero := make([]byte, dtype.ByteWidth)
-
-	values := [][]byte{
-		[]byte("7654321"),
-		nil,
-		[]byte("AZERTYU"),
-	}
-	valid := []bool{true, false, true}
-	b.AppendValues(values, valid)
-
-	b.Retain()
-	b.Release()
-
-	a := b.NewFixedSizeBinaryArray()
-	assert.Equal(t, 3, a.Len())
-	assert.Equal(t, 1, a.NullN())
-	assert.Equal(t, []byte("7654321"), a.Value(0))
-	assert.Equal(t, zero, a.Value(1))
-	assert.Equal(t, true, a.IsNull(1))
-	assert.Equal(t, false, a.IsValid(1))
-	assert.Equal(t, []byte("AZERTYU"), a.Value(2))
-	a.Release()
-
-	// Test builder reset and NewArray API.
-	b.AppendValues(values, valid)
-	a = b.NewArray().(*array.FixedSizeBinary)
-	assert.Equal(t, 3, a.Len())
-	assert.Equal(t, 1, a.NullN())
-	assert.Equal(t, []byte("7654321"), a.Value(0))
-	assert.Equal(t, zero, a.Value(1))
-	assert.Equal(t, []byte("AZERTYU"), a.Value(2))
-	a.Release()
-
-	b.Release()
-}
-
-func TestFixedSizeBinarySlice(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	dtype := &arrow.FixedSizeBinaryType{ByteWidth: 4}
-	b := array.NewFixedSizeBinaryBuilder(mem, dtype)
-	defer b.Release()
-
-	var data = [][]byte{
-		[]byte("ABCD"),
-		[]byte("1234"),
-		nil,
-		[]byte("AZER"),
-	}
-	b.AppendValues(data[:2], nil)
-	b.AppendNull()
-	b.Append(data[3])
-
-	arr := b.NewFixedSizeBinaryArray()
-	defer arr.Release()
-
-	slice := array.NewSliceData(arr.Data(), 2, 4)
-	defer slice.Release()
-
-	sub1 := array.MakeFromData(slice)
-	defer sub1.Release()
-
-	v, ok := sub1.(*array.FixedSizeBinary)
-	if !ok {
-		t.Fatalf("could not type-assert to array.String")
-	}
-
-	if got, want := v.String(), `[(null) "AZER"]`; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-
-	if got, want := v.NullN(), 1; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-}
diff --git a/go/arrow/array/fixedsize_binarybuilder.go b/go/arrow/array/fixedsize_binarybuilder.go
deleted file mode 100644
index 8a2f65f..0000000
--- a/go/arrow/array/fixedsize_binarybuilder.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"fmt"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// A FixedSizeBinaryBuilder is used to build a FixedSizeBinary array using the Append methods.
-type FixedSizeBinaryBuilder struct {
-	builder
-
-	dtype  *arrow.FixedSizeBinaryType
-	values *byteBufferBuilder
-}
-
-func NewFixedSizeBinaryBuilder(mem memory.Allocator, dtype *arrow.FixedSizeBinaryType) *FixedSizeBinaryBuilder {
-	b := &FixedSizeBinaryBuilder{
-		builder: builder{refCount: 1, mem: mem},
-		dtype:   dtype,
-		values:  newByteBufferBuilder(mem),
-	}
-	return b
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-// Release may be called simultaneously from multiple goroutines.
-func (b *FixedSizeBinaryBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.values != nil {
-			b.values.Release()
-			b.values = nil
-		}
-	}
-}
-
-func (b *FixedSizeBinaryBuilder) Append(v []byte) {
-	if len(v) != b.dtype.ByteWidth {
-		// TODO(alexandre): should we return an error instead?
-		panic("len(v) != b.dtype.ByteWidth")
-	}
-
-	b.Reserve(1)
-	b.values.Append(v)
-	b.UnsafeAppendBoolToBitmap(true)
-}
-
-func (b *FixedSizeBinaryBuilder) AppendNull() {
-	b.Reserve(1)
-	b.values.Advance(b.dtype.ByteWidth)
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-// AppendValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *FixedSizeBinaryBuilder) AppendValues(v [][]byte, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	for _, vv := range v {
-		switch len(vv) {
-		case 0:
-			b.values.Advance(b.dtype.ByteWidth)
-		case b.dtype.ByteWidth:
-			b.values.Append(vv)
-		default:
-			panic(fmt.Errorf("array: invalid binary length (got=%d, want=%d)", len(vv), b.dtype.ByteWidth))
-		}
-	}
-
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *FixedSizeBinaryBuilder) init(capacity int) {
-	b.builder.init(capacity)
-	b.values.resize(capacity * b.dtype.ByteWidth)
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *FixedSizeBinaryBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *FixedSizeBinaryBuilder) Resize(n int) {
-	b.builder.resize(n, b.init)
-}
-
-// NewArray creates a FixedSizeBinary array from the memory buffers used by the
-// builder and resets the FixedSizeBinaryBuilder so it can be used to build a new array.
-func (b *FixedSizeBinaryBuilder) NewArray() Interface {
-	return b.NewFixedSizeBinaryArray()
-}
-
-// NewFixedSizeBinaryArray creates a FixedSizeBinary array from the memory buffers used by the builder and resets the FixedSizeBinaryBuilder
-// so it can be used to build a new array.
-func (b *FixedSizeBinaryBuilder) NewFixedSizeBinaryArray() (a *FixedSizeBinary) {
-	data := b.newData()
-	a = NewFixedSizeBinaryData(data)
-	data.Release()
-	return
-}
-
-func (b *FixedSizeBinaryBuilder) newData() (data *Data) {
-	values := b.values.Finish()
-	data = NewData(b.dtype, b.length, []*memory.Buffer{b.nullBitmap, values}, nil, b.nulls, 0)
-
-	if values != nil {
-		values.Release()
-	}
-
-	b.builder.reset()
-
-	return
-}
-
-var (
-	_ Builder = (*FixedSizeBinaryBuilder)(nil)
-)
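
The AppendValues contract documented above (a valid slice that is either empty or exactly len(v) long, with zero-length entries standing in for a null slot's bytes) is easiest to see in use. A minimal sketch against the pre-removal API (illustrative, not part of the diff):

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()
	dtype := &arrow.FixedSizeBinaryType{ByteWidth: 4}

	b := array.NewFixedSizeBinaryBuilder(mem, dtype)
	defer b.Release()

	// One flag per value; the nil entry only advances the value buffer
	// by ByteWidth bytes, since its slot is masked out as null.
	b.AppendValues(
		[][]byte{[]byte("ABCD"), nil, []byte("WXYZ")},
		[]bool{true, false, true},
	)

	a := b.NewFixedSizeBinaryArray()
	defer a.Release()

	fmt.Println(a) // ["ABCD" (null) "WXYZ"]
}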
diff --git a/go/arrow/array/fixedsize_binarybuilder_test.go b/go/arrow/array/fixedsize_binarybuilder_test.go
deleted file mode 100644
index 08740c5..0000000
--- a/go/arrow/array/fixedsize_binarybuilder_test.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestFixedSizeBinaryBuilder(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	dtype := arrow.FixedSizeBinaryType{ByteWidth: 7}
-	b := NewFixedSizeBinaryBuilder(mem, &dtype)
-
-	b.Append([]byte("1234567"))
-	b.AppendNull()
-	b.Append([]byte("ABCDEFG"))
-	b.AppendNull()
-
-	assert.Equal(t, 4, b.Len(), "unexpected Len()")
-	assert.Equal(t, 2, b.NullN(), "unexpected NullN()")
-
-	values := [][]byte{
-		[]byte("7654321"),
-		nil,
-		[]byte("AZERTYU"),
-	}
-	b.AppendValues(values, []bool{true, false, true})
-
-	assert.Equal(t, 7, b.Len(), "unexpected Len()")
-	assert.Equal(t, 3, b.NullN(), "unexpected NullN()")
-
-	a := b.NewFixedSizeBinaryArray()
-
-	// check state of builder after NewFixedSizeBinaryArray
-	assert.Zero(t, b.Len(), "unexpected ArrayBuilder.Len(), NewFixedSizeBinaryArray did not reset state")
-	assert.Zero(t, b.Cap(), "unexpected ArrayBuilder.Cap(), NewFixedSizeBinaryArray did not reset state")
-	assert.Zero(t, b.NullN(), "unexpected ArrayBuilder.NullN(), NewFixedSizeBinaryArray did not reset state")
-	assert.Equal(t, a.String(), `["1234567" (null) "ABCDEFG" (null) "7654321" (null) "AZERTYU"]`)
-
-	b.Release()
-	a.Release()
-}
-
-func TestFixedSizeBinaryBuilder_Empty(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	dtype := arrow.FixedSizeBinaryType{ByteWidth: 7}
-	ab := NewFixedSizeBinaryBuilder(mem, &dtype)
-	defer ab.Release()
-
-	want := [][]byte{
-		[]byte("1234567"),
-		[]byte("AZERTYU"),
-		[]byte("7654321"),
-	}
-
-	fixedSizeValues := func(a *FixedSizeBinary) [][]byte {
-		vs := make([][]byte, a.Len())
-		for i := range vs {
-			vs[i] = a.Value(i)
-		}
-		return vs
-	}
-
-	ab.AppendValues([][]byte{}, nil)
-	a := ab.NewFixedSizeBinaryArray()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(nil, nil)
-	a = ab.NewFixedSizeBinaryArray()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues([][]byte{}, nil)
-	ab.AppendValues(want, nil)
-	a = ab.NewFixedSizeBinaryArray()
-	assert.Equal(t, want, fixedSizeValues(a))
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	ab.AppendValues([][]byte{}, nil)
-	a = ab.NewFixedSizeBinaryArray()
-	assert.Equal(t, want, fixedSizeValues(a))
-	a.Release()
-}
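
One contract the reset assertions above pin down: NewFixedSizeBinaryArray (and NewArray) snapshots the accumulated buffers into a new array and resets the builder, so the same builder can immediately produce the next array. A short sketch of that build, reset, and reuse cycle (illustrative only, pre-removal API):

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()
	dtype := &arrow.FixedSizeBinaryType{ByteWidth: 2}

	b := array.NewFixedSizeBinaryBuilder(mem, dtype)
	defer b.Release()

	b.Append([]byte("ab"))
	first := b.NewFixedSizeBinaryArray() // snapshots buffers and resets b
	defer first.Release()

	fmt.Println(b.Len()) // 0: the builder is empty and reusable

	b.Append([]byte("cd"))
	second := b.NewFixedSizeBinaryArray()
	defer second.Release()

	fmt.Println(first, second) // ["ab"] ["cd"]
}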
diff --git a/go/arrow/array/float16.go b/go/arrow/array/float16.go
deleted file mode 100644
index 931e2d9..0000000
--- a/go/arrow/array/float16.go
+++ /dev/null
@@ -1,87 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"fmt"
-	"strings"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/float16"
-)
-
-// Float16 represents an immutable sequence of Float16 values.
-type Float16 struct {
-	array
-	values []float16.Num
-}
-
-func NewFloat16Data(data *Data) *Float16 {
-	a := &Float16{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *Float16) Value(i int) float16.Num { return a.values[i] }
-
-func (a *Float16) Values() []float16.Num { return a.values }
-
-func (a *Float16) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			fmt.Fprintf(o, " ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%v", a.values[i].Float32())
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *Float16) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.values = arrow.Float16Traits.CastFromBytes(vals.Bytes())
-		beg := a.array.data.offset
-		end := beg + a.array.data.length
-		a.values = a.values[beg:end]
-	}
-}
-
-func arrayEqualFloat16(left, right *Float16) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if left.Value(i) != right.Value(i) {
-			return false
-		}
-	}
-	return true
-}
-
-var (
-	_ Interface = (*Float16)(nil)
-)
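
Go has no native half-precision scalar, so the array above stores 2-byte float16.Num values and converts at the boundaries, as the builder and its tests in the following hunks do with float16.New and Num.Float32. A small round-trip sketch (illustrative only, pre-removal API):

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/float16"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	b := array.NewFloat16Builder(memory.NewGoAllocator())
	defer b.Release()

	// Narrow float32 inputs to half precision on the way in.
	for _, v := range []float32{1, 2.5, 3} {
		b.Append(float16.New(v))
	}
	b.AppendNull()

	a := b.NewFloat16Array()
	defer a.Release()

	for i := 0; i < a.Len(); i++ {
		if a.IsNull(i) {
			fmt.Println("(null)")
			continue
		}
		fmt.Println(a.Value(i).Float32()) // widen back to float32
	}
}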
diff --git a/go/arrow/array/float16_builder.go b/go/arrow/array/float16_builder.go
deleted file mode 100644
index 8086427..0000000
--- a/go/arrow/array/float16_builder.go
+++ /dev/null
@@ -1,165 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/float16"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-type Float16Builder struct {
-	builder
-
-	data    *memory.Buffer
-	rawData []float16.Num
-}
-
-func NewFloat16Builder(mem memory.Allocator) *Float16Builder {
-	return &Float16Builder{builder: builder{refCount: 1, mem: mem}}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-func (b *Float16Builder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.data != nil {
-			b.data.Release()
-			b.data = nil
-			b.rawData = nil
-		}
-	}
-}
-
-func (b *Float16Builder) Append(v float16.Num) {
-	b.Reserve(1)
-	b.UnsafeAppend(v)
-}
-
-func (b *Float16Builder) UnsafeAppend(v float16.Num) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	b.rawData[b.length] = v
-	b.length++
-}
-
-func (b *Float16Builder) AppendNull() {
-	b.Reserve(1)
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-func (b *Float16Builder) UnsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-// AppendValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *Float16Builder) AppendValues(v []float16.Num, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	arrow.Float16Traits.Copy(b.rawData[b.length:], v)
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *Float16Builder) init(capacity int) {
-	b.builder.init(capacity)
-
-	b.data = memory.NewResizableBuffer(b.mem)
-	bytesN := arrow.Float16Traits.BytesRequired(capacity)
-	b.data.Resize(bytesN)
-	b.rawData = arrow.Float16Traits.CastFromBytes(b.data.Bytes())
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *Float16Builder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *Float16Builder) Resize(n int) {
-	nBuilder := n
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
-	if b.capacity == 0 {
-		b.init(n)
-	} else {
-		b.builder.resize(nBuilder, b.init)
-		b.data.Resize(arrow.Float16Traits.BytesRequired(n))
-		b.rawData = arrow.Float16Traits.CastFromBytes(b.data.Bytes())
-	}
-}
-
-// NewArray creates a Float16 array from the memory buffers used by the builder and resets the Float16Builder
-// so it can be used to build a new array.
-func (b *Float16Builder) NewArray() Interface {
-	return b.NewFloat16Array()
-}
-
-// NewFloat16Array creates a Float16 array from the memory buffers used by the builder and resets the Float16Builder
-// so it can be used to build a new array.
-func (b *Float16Builder) NewFloat16Array() (a *Float16) {
-	data := b.newData()
-	a = NewFloat16Data(data)
-	data.Release()
-	return
-}
-
-func (b *Float16Builder) newData() (data *Data) {
-	bytesRequired := arrow.Float16Traits.BytesRequired(b.length)
-	if bytesRequired > 0 && bytesRequired < b.data.Len() {
-		// trim buffers
-		b.data.Resize(bytesRequired)
-	}
-	data = NewData(arrow.FixedWidthTypes.Float16, b.length, []*memory.Buffer{b.nullBitmap, b.data}, nil, b.nulls, 0)
-	b.reset()
-
-	if b.data != nil {
-		b.data.Release()
-		b.data = nil
-		b.rawData = nil
-	}
-
-	return
-}
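
Append above is just Reserve(1) followed by UnsafeAppend; the split exists so callers appending many values can pay the capacity check once up front. A sketch of that pattern (the appendAll helper is hypothetical, not part of the removed API):

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/float16"
	"github.com/apache/arrow/go/arrow/memory"
)

// appendAll reserves once, then appends without per-element capacity
// checks; UnsafeAppend is safe here because Reserve guaranteed room.
func appendAll(b *array.Float16Builder, vs []float16.Num) {
	b.Reserve(len(vs)) // triggers at most one Resize
	for _, v := range vs {
		b.UnsafeAppend(v)
	}
}

func main() {
	b := array.NewFloat16Builder(memory.NewGoAllocator())
	defer b.Release()

	appendAll(b, []float16.Num{float16.New(1), float16.New(2)})

	a := b.NewFloat16Array()
	defer a.Release()
	fmt.Println(a) // [1 2]
}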
diff --git a/go/arrow/array/float16_builder_test.go b/go/arrow/array/float16_builder_test.go
deleted file mode 100644
index 23d25d2..0000000
--- a/go/arrow/array/float16_builder_test.go
+++ /dev/null
@@ -1,119 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/float16"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func float32Values(a *array.Float16) []float32 {
-	values := make([]float32, a.Len())
-	for i, v := range a.Values() {
-		values[i] = v.Float32()
-	}
-	return values
-}
-
-func TestNewFloat16Builder(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewFloat16Builder(mem)
-
-	ab.Append(float16.New(1))
-	ab.Append(float16.New(2))
-	ab.Append(float16.New(3))
-	ab.AppendNull()
-	ab.Append(float16.New(5))
-	ab.Append(float16.New(6))
-	ab.AppendNull()
-	ab.Append(float16.New(8))
-	ab.Append(float16.New(9))
-	ab.Append(float16.New(10))
-
-	// check state of builder before NewFloat16Array
-	assert.Equal(t, 10, ab.Len(), "unexpected Len()")
-	assert.Equal(t, 2, ab.NullN(), "unexpected NullN()")
-
-	a := ab.NewFloat16Array()
-
-	// check state of builder after NewFloat16Array
-	assert.Zero(t, ab.Len(), "unexpected ArrayBuilder.Len(), NewFloat16Array did not reset state")
-	assert.Zero(t, ab.Cap(), "unexpected ArrayBuilder.Cap(), NewFloat16Array did not reset state")
-	assert.Zero(t, ab.NullN(), "unexpected ArrayBuilder.NullN(), NewFloat16Array did not reset state")
-
-	// check state of array
-	assert.Equal(t, 2, a.NullN(), "unexpected null count")
-
-	assert.Equal(t, []float32{1, 2, 3, 0, 5, 6, 0, 8, 9, 10}, float32Values(a), "unexpected Float16Values")
-	assert.Equal(t, []byte{0xb7}, a.NullBitmapBytes()[:1]) // bitmap buffer is longer than 1 byte due to minBuilderCapacity; compare only the first byte
-	assert.Len(t, a.Values(), 10, "unexpected length of Float16Values")
-
-	a.Release()
-	ab.Append(float16.New(7))
-	ab.Append(float16.New(8))
-
-	a = ab.NewFloat16Array()
-
-	assert.Equal(t, 0, a.NullN())
-	assert.Equal(t, []float32{7, 8}, float32Values(a))
-	assert.Len(t, a.Values(), 2)
-
-	a.Release()
-}
-
-func TestFloat16Builder_Empty(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	ab := array.NewFloat16Builder(mem)
-	defer ab.Release()
-
-	want := []float16.Num{float16.New(3), float16.New(4)}
-
-	ab.AppendValues([]float16.Num{}, nil)
-	a := ab.NewFloat16Array()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(nil, nil)
-	a = ab.NewFloat16Array()
-	assert.Zero(t, a.Len())
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	a = ab.NewFloat16Array()
-	assert.Equal(t, want, a.Values())
-	a.Release()
-
-	ab.AppendValues([]float16.Num{}, nil)
-	ab.AppendValues(want, nil)
-	a = ab.NewFloat16Array()
-	assert.Equal(t, want, a.Values())
-	a.Release()
-
-	ab.AppendValues(want, nil)
-	ab.AppendValues([]float16.Num{}, nil)
-	a = ab.NewFloat16Array()
-	assert.Equal(t, want, a.Values())
-	a.Release()
-}
diff --git a/go/arrow/array/interval.go b/go/arrow/array/interval.go
deleted file mode 100644
index d313558..0000000
--- a/go/arrow/array/interval.go
+++ /dev/null
@@ -1,434 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array // import "github.com/apache/arrow/go/arrow/array"
-
-import (
-	"fmt"
-	"strings"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-	"golang.org/x/xerrors"
-)
-
-func NewIntervalData(data *Data) Interface {
-	switch data.dtype.(type) {
-	case *arrow.MonthIntervalType:
-		return NewMonthIntervalData(data)
-	case *arrow.DayTimeIntervalType:
-		return NewDayTimeIntervalData(data)
-	default:
-		panic(xerrors.Errorf("arrow/array: unknown interval data type %T", data.dtype))
-	}
-}
-
-// MonthInterval represents an immutable sequence of arrow.MonthInterval values.
-type MonthInterval struct {
-	array
-	values []arrow.MonthInterval
-}
-
-func NewMonthIntervalData(data *Data) *MonthInterval {
-	a := &MonthInterval{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *MonthInterval) Value(i int) arrow.MonthInterval            { return a.values[i] }
-func (a *MonthInterval) MonthIntervalValues() []arrow.MonthInterval { return a.values }
-
-func (a *MonthInterval) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i, v := range a.values {
-		if i > 0 {
-			fmt.Fprintf(o, " ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%v", v)
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *MonthInterval) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.values = arrow.MonthIntervalTraits.CastFromBytes(vals.Bytes())
-		beg := a.array.data.offset
-		end := beg + a.array.data.length
-		a.values = a.values[beg:end]
-	}
-}
-
-func arrayEqualMonthInterval(left, right *MonthInterval) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if left.Value(i) != right.Value(i) {
-			return false
-		}
-	}
-	return true
-}
-
-type MonthIntervalBuilder struct {
-	builder
-
-	data    *memory.Buffer
-	rawData []arrow.MonthInterval
-}
-
-func NewMonthIntervalBuilder(mem memory.Allocator) *MonthIntervalBuilder {
-	return &MonthIntervalBuilder{builder: builder{refCount: 1, mem: mem}}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-func (b *MonthIntervalBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.data != nil {
-			b.data.Release()
-			b.data = nil
-			b.rawData = nil
-		}
-	}
-}
-
-func (b *MonthIntervalBuilder) Append(v arrow.MonthInterval) {
-	b.Reserve(1)
-	b.UnsafeAppend(v)
-}
-
-func (b *MonthIntervalBuilder) AppendNull() {
-	b.Reserve(1)
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-func (b *MonthIntervalBuilder) UnsafeAppend(v arrow.MonthInterval) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	b.rawData[b.length] = v
-	b.length++
-}
-
-func (b *MonthIntervalBuilder) UnsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-// AppendValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *MonthIntervalBuilder) AppendValues(v []arrow.MonthInterval, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	arrow.MonthIntervalTraits.Copy(b.rawData[b.length:], v)
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *MonthIntervalBuilder) init(capacity int) {
-	b.builder.init(capacity)
-
-	b.data = memory.NewResizableBuffer(b.mem)
-	bytesN := arrow.MonthIntervalTraits.BytesRequired(capacity)
-	b.data.Resize(bytesN)
-	b.rawData = arrow.MonthIntervalTraits.CastFromBytes(b.data.Bytes())
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *MonthIntervalBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *MonthIntervalBuilder) Resize(n int) {
-	nBuilder := n
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
-	if b.capacity == 0 {
-		b.init(n)
-	} else {
-		b.builder.resize(nBuilder, b.init)
-		b.data.Resize(arrow.MonthIntervalTraits.BytesRequired(n))
-		b.rawData = arrow.MonthIntervalTraits.CastFromBytes(b.data.Bytes())
-	}
-}
-
-// NewArray creates a MonthInterval array from the memory buffers used by the builder and resets the MonthIntervalBuilder
-// so it can be used to build a new array.
-func (b *MonthIntervalBuilder) NewArray() Interface {
-	return b.NewMonthIntervalArray()
-}
-
-// NewMonthIntervalArray creates a MonthInterval array from the memory buffers used by the builder and resets the MonthIntervalBuilder
-// so it can be used to build a new array.
-func (b *MonthIntervalBuilder) NewMonthIntervalArray() (a *MonthInterval) {
-	data := b.newData()
-	a = NewMonthIntervalData(data)
-	data.Release()
-	return
-}
-
-func (b *MonthIntervalBuilder) newData() (data *Data) {
-	bytesRequired := arrow.MonthIntervalTraits.BytesRequired(b.length)
-	if bytesRequired > 0 && bytesRequired < b.data.Len() {
-		// trim buffers
-		b.data.Resize(bytesRequired)
-	}
-	data = NewData(arrow.FixedWidthTypes.MonthInterval, b.length, []*memory.Buffer{b.nullBitmap, b.data}, nil, b.nulls, 0)
-	b.reset()
-
-	if b.data != nil {
-		b.data.Release()
-		b.data = nil
-		b.rawData = nil
-	}
-
-	return
-}
-
-// DayTimeInterval represents an immutable sequence of arrow.DayTimeInterval values.
-type DayTimeInterval struct {
-	array
-	values []arrow.DayTimeInterval
-}
-
-func NewDayTimeIntervalData(data *Data) *DayTimeInterval {
-	a := &DayTimeInterval{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *DayTimeInterval) Value(i int) arrow.DayTimeInterval              { return a.values[i] }
-func (a *DayTimeInterval) DayTimeIntervalValues() []arrow.DayTimeInterval { return a.values }
-
-func (a *DayTimeInterval) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i, v := range a.values {
-		if i > 0 {
-			fmt.Fprintf(o, " ")
-		}
-		switch {
-		case a.IsNull(i):
-			o.WriteString("(null)")
-		default:
-			fmt.Fprintf(o, "%v", v)
-		}
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *DayTimeInterval) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.values = arrow.DayTimeIntervalTraits.CastFromBytes(vals.Bytes())
-		beg := a.array.data.offset
-		end := beg + a.array.data.length
-		a.values = a.values[beg:end]
-	}
-}
-
-func arrayEqualDayTimeInterval(left, right *DayTimeInterval) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		if left.Value(i) != right.Value(i) {
-			return false
-		}
-	}
-	return true
-}
-
-type DayTimeIntervalBuilder struct {
-	builder
-
-	data    *memory.Buffer
-	rawData []arrow.DayTimeInterval
-}
-
-func NewDayTimeIntervalBuilder(mem memory.Allocator) *DayTimeIntervalBuilder {
-	return &DayTimeIntervalBuilder{builder: builder{refCount: 1, mem: mem}}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-func (b *DayTimeIntervalBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-		if b.data != nil {
-			b.data.Release()
-			b.data = nil
-			b.rawData = nil
-		}
-	}
-}
-
-func (b *DayTimeIntervalBuilder) Append(v arrow.DayTimeInterval) {
-	b.Reserve(1)
-	b.UnsafeAppend(v)
-}
-
-func (b *DayTimeIntervalBuilder) AppendNull() {
-	b.Reserve(1)
-	b.UnsafeAppendBoolToBitmap(false)
-}
-
-func (b *DayTimeIntervalBuilder) UnsafeAppend(v arrow.DayTimeInterval) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	b.rawData[b.length] = v
-	b.length++
-}
-
-func (b *DayTimeIntervalBuilder) UnsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-// AppendValues will append the values in the v slice. The valid slice determines which values
-// in v are valid (not null). The valid slice must either be empty or be equal in length to v. If empty,
-// all values in v are appended and considered valid.
-func (b *DayTimeIntervalBuilder) AppendValues(v []arrow.DayTimeInterval, valid []bool) {
-	if len(v) != len(valid) && len(valid) != 0 {
-		panic("len(v) != len(valid) && len(valid) != 0")
-	}
-
-	if len(v) == 0 {
-		return
-	}
-
-	b.Reserve(len(v))
-	arrow.DayTimeIntervalTraits.Copy(b.rawData[b.length:], v)
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(v))
-}
-
-func (b *DayTimeIntervalBuilder) init(capacity int) {
-	b.builder.init(capacity)
-
-	b.data = memory.NewResizableBuffer(b.mem)
-	bytesN := arrow.DayTimeIntervalTraits.BytesRequired(capacity)
-	b.data.Resize(bytesN)
-	b.rawData = arrow.DayTimeIntervalTraits.CastFromBytes(b.data.Bytes())
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *DayTimeIntervalBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.Resize)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *DayTimeIntervalBuilder) Resize(n int) {
-	nBuilder := n
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
-	if b.capacity == 0 {
-		b.init(n)
-	} else {
-		b.builder.resize(nBuilder, b.init)
-		b.data.Resize(arrow.DayTimeIntervalTraits.BytesRequired(n))
-		b.rawData = arrow.DayTimeIntervalTraits.CastFromBytes(b.data.Bytes())
-	}
-}
-
-// NewArray creates a DayTimeInterval array from the memory buffers used by the builder and resets the DayTimeIntervalBuilder
-// so it can be used to build a new array.
-func (b *DayTimeIntervalBuilder) NewArray() Interface {
-	return b.NewDayTimeIntervalArray()
-}
-
-// NewDayTimeIntervalArray creates a DayTimeInterval array from the memory buffers used by the builder and resets the DayTimeIntervalBuilder
-// so it can be used to build a new array.
-func (b *DayTimeIntervalBuilder) NewDayTimeIntervalArray() (a *DayTimeInterval) {
-	data := b.newData()
-	a = NewDayTimeIntervalData(data)
-	data.Release()
-	return
-}
-
-func (b *DayTimeIntervalBuilder) newData() (data *Data) {
-	bytesRequired := arrow.DayTimeIntervalTraits.BytesRequired(b.length)
-	if bytesRequired > 0 && bytesRequired < b.data.Len() {
-		// trim buffers
-		b.data.Resize(bytesRequired)
-	}
-	data = NewData(arrow.FixedWidthTypes.DayTimeInterval, b.length, []*memory.Buffer{b.nullBitmap, b.data}, nil, b.nulls, 0)
-	b.reset()
-
-	if b.data != nil {
-		b.data.Release()
-		b.data = nil
-		b.rawData = nil
-	}
-
-	return
-}
-
-var (
-	_ Interface = (*MonthInterval)(nil)
-	_ Interface = (*DayTimeInterval)(nil)
-
-	_ Builder = (*MonthIntervalBuilder)(nil)
-	_ Builder = (*DayTimeIntervalBuilder)(nil)
-)
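
interval.go above ships two distinct element types behind the single NewIntervalData dispatcher: MonthInterval (a month count) and DayTimeInterval (a day/millisecond pair). A minimal sketch of both builders, using the same positional struct literals as the tests that follow (illustrative only, pre-removal API):

package main

import (
	"fmt"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	mem := memory.NewGoAllocator()

	mb := array.NewMonthIntervalBuilder(mem)
	defer mb.Release()
	mb.AppendValues([]arrow.MonthInterval{1, 2, 3}, []bool{true, false, true})
	months := mb.NewMonthIntervalArray()
	defer months.Release()
	fmt.Println(months) // [1 (null) 3]

	db := array.NewDayTimeIntervalBuilder(mem)
	defer db.Release()
	db.Append(arrow.DayTimeInterval{1, 500}) // {days, milliseconds}
	days := db.NewDayTimeIntervalArray()
	defer days.Release()
	fmt.Println(days) // [{1 500}]
}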
diff --git a/go/arrow/array/interval_test.go b/go/arrow/array/interval_test.go
deleted file mode 100644
index bac0bb4..0000000
--- a/go/arrow/array/interval_test.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array_test
-
-import (
-	"testing"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/array"
-	"github.com/apache/arrow/go/arrow/memory"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestMonthIntervalArray(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	var (
-		want   = []arrow.MonthInterval{1, 2, 3, 4}
-		valids = []bool{true, true, false, true}
-	)
-
-	b := array.NewMonthIntervalBuilder(mem)
-	defer b.Release()
-
-	b.Retain()
-	b.Release()
-
-	b.AppendValues(want[:2], nil)
-	b.AppendNull()
-	b.Append(want[3])
-
-	if got, want := b.Len(), len(want); got != want {
-		t.Fatalf("invalid len: got=%d, want=%d", got, want)
-	}
-
-	if got, want := b.NullN(), 1; got != want {
-		t.Fatalf("invalid nulls: got=%d, want=%d", got, want)
-	}
-
-	arr := b.NewMonthIntervalArray()
-	defer arr.Release()
-
-	arr.Retain()
-	arr.Release()
-
-	if got, want := arr.Len(), len(want); got != want {
-		t.Fatalf("invalid len: got=%d, want=%d", got, want)
-	}
-
-	if got, want := arr.NullN(), 1; got != want {
-		t.Fatalf("invalid nulls: got=%d, want=%d", got, want)
-	}
-
-	for i := range want {
-		if arr.IsNull(i) != !valids[i] {
-			t.Fatalf("arr[%d]-validity: got=%v want=%v", i, !arr.IsNull(i), valids[i])
-		}
-		switch {
-		case arr.IsNull(i):
-		default:
-			got := arr.Value(i)
-			if got != want[i] {
-				t.Fatalf("arr[%d]: got=%q, want=%q", i, got, want[i])
-			}
-		}
-	}
-
-	sub := array.MakeFromData(arr.Data())
-	defer sub.Release()
-
-	if sub.DataType().ID() != arrow.INTERVAL {
-		t.Fatalf("invalid type: got=%q, want=interval", sub.DataType().Name())
-	}
-
-	if _, ok := sub.(*array.MonthInterval); !ok {
-		t.Fatalf("could not type-assert to array.MonthInterval")
-	}
-
-	if got, want := arr.String(), `[1 2 (null) 4]`; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-	slice := array.NewSliceData(arr.Data(), 2, 4)
-	defer slice.Release()
-
-	sub1 := array.MakeFromData(slice)
-	defer sub1.Release()
-
-	v, ok := sub1.(*array.MonthInterval)
-	if !ok {
-		t.Fatalf("could not type-assert to array.MonthInterval")
-	}
-
-	if got, want := v.String(), `[(null) 4]`; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-}
-
-func TestMonthIntervalBuilder_Empty(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	want := []arrow.MonthInterval{1, 2, 3, 4}
-
-	b := array.NewMonthIntervalBuilder(mem)
-	defer b.Release()
-
-	miValues := func(a *array.MonthInterval) []arrow.MonthInterval {
-		vs := make([]arrow.MonthInterval, a.Len())
-		for i := range vs {
-			vs[i] = a.Value(i)
-		}
-		return vs
-	}
-
-	b.AppendValues([]arrow.MonthInterval{}, nil)
-	arr := b.NewMonthIntervalArray()
-	assert.Zero(t, arr.Len())
-	arr.Release()
-
-	b.AppendValues(nil, nil)
-	arr = b.NewMonthIntervalArray()
-	assert.Zero(t, arr.Len())
-	arr.Release()
-
-	b.AppendValues([]arrow.MonthInterval{}, nil)
-	b.AppendValues(want, nil)
-	arr = b.NewMonthIntervalArray()
-	assert.Equal(t, want, miValues(arr))
-	arr.Release()
-
-	b.AppendValues(want, nil)
-	b.AppendValues([]arrow.MonthInterval{}, nil)
-	arr = b.NewMonthIntervalArray()
-	assert.Equal(t, want, miValues(arr))
-	arr.Release()
-}
-
-func TestDayTimeArray(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	var (
-		want   = []arrow.DayTimeInterval{{1, 1}, {2, 2}, {3, 3}, {4, 4}}
-		valids = []bool{true, true, false, true}
-	)
-
-	b := array.NewDayTimeIntervalBuilder(mem)
-	defer b.Release()
-
-	b.Retain()
-	b.Release()
-
-	b.AppendValues(want[:2], nil)
-	b.AppendNull()
-	b.Append(want[3])
-
-	if got, want := b.Len(), len(want); got != want {
-		t.Fatalf("invalid len: got=%d, want=%d", got, want)
-	}
-
-	if got, want := b.NullN(), 1; got != want {
-		t.Fatalf("invalid nulls: got=%d, want=%d", got, want)
-	}
-
-	arr := b.NewDayTimeIntervalArray()
-	defer arr.Release()
-
-	arr.Retain()
-	arr.Release()
-
-	if got, want := arr.Len(), len(want); got != want {
-		t.Fatalf("invalid len: got=%d, want=%d", got, want)
-	}
-
-	if got, want := arr.NullN(), 1; got != want {
-		t.Fatalf("invalid nulls: got=%d, want=%d", got, want)
-	}
-
-	for i := range want {
-		if arr.IsNull(i) != !valids[i] {
-			t.Fatalf("arr[%d]-validity: got=%v want=%v", i, !arr.IsNull(i), valids[i])
-		}
-		switch {
-		case arr.IsNull(i):
-		default:
-			got := arr.Value(i)
-			if got != want[i] {
-				t.Fatalf("arr[%d]: got=%q, want=%q", i, got, want[i])
-			}
-		}
-	}
-
-	sub := array.MakeFromData(arr.Data())
-	defer sub.Release()
-
-	if sub.DataType().ID() != arrow.INTERVAL {
-		t.Fatalf("invalid type: got=%q, want=interval", sub.DataType().Name())
-	}
-
-	if _, ok := sub.(*array.DayTimeInterval); !ok {
-		t.Fatalf("could not type-assert to array.DayTimeInterval")
-	}
-
-	if got, want := arr.String(), `[{1 1} {2 2} (null) {4 4}]`; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-	slice := array.NewSliceData(arr.Data(), 2, 4)
-	defer slice.Release()
-
-	sub1 := array.MakeFromData(slice)
-	defer sub1.Release()
-
-	v, ok := sub1.(*array.DayTimeInterval)
-	if !ok {
-		t.Fatalf("could not type-assert to array.DayInterval")
-	}
-
-	if got, want := v.String(), `[(null) {4 4}]`; got != want {
-		t.Fatalf("got=%q, want=%q", got, want)
-	}
-}
-
-func TestDayTimeIntervalBuilder_Empty(t *testing.T) {
-	mem := memory.NewCheckedAllocator(memory.NewGoAllocator())
-	defer mem.AssertSize(t, 0)
-
-	want := []arrow.DayTimeInterval{{1, 1}, {2, 2}, {3, 3}, {4, 4}}
-
-	b := array.NewDayTimeIntervalBuilder(mem)
-	defer b.Release()
-
-	dtValues := func(a *array.DayTimeInterval) []arrow.DayTimeInterval {
-		vs := make([]arrow.DayTimeInterval, a.Len())
-		for i := range vs {
-			vs[i] = a.Value(i)
-		}
-		return vs
-	}
-
-	b.AppendValues([]arrow.DayTimeInterval{}, nil)
-	arr := b.NewDayTimeIntervalArray()
-	assert.Zero(t, arr.Len())
-	arr.Release()
-
-	b.AppendValues(nil, nil)
-	arr = b.NewDayTimeIntervalArray()
-	assert.Zero(t, arr.Len())
-	arr.Release()
-
-	b.AppendValues([]arrow.DayTimeInterval{}, nil)
-	b.AppendValues(want, nil)
-	arr = b.NewDayTimeIntervalArray()
-	assert.Equal(t, want, dtValues(arr))
-	arr.Release()
-
-	b.AppendValues(want, nil)
-	b.AppendValues([]arrow.DayTimeInterval{}, nil)
-	arr = b.NewDayTimeIntervalArray()
-	assert.Equal(t, want, dtValues(arr))
-	arr.Release()
-}
diff --git a/go/arrow/array/list.go b/go/arrow/array/list.go
deleted file mode 100644
index 22014c8..0000000
--- a/go/arrow/array/list.go
+++ /dev/null
@@ -1,269 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package array
-
-import (
-	"fmt"
-	"strings"
-	"sync/atomic"
-
-	"github.com/apache/arrow/go/arrow"
-	"github.com/apache/arrow/go/arrow/bitutil"
-	"github.com/apache/arrow/go/arrow/internal/debug"
-	"github.com/apache/arrow/go/arrow/memory"
-)
-
-// List represents an immutable sequence of array values.
-type List struct {
-	array
-	values  Interface
-	offsets []int32
-}
-
-// NewListData returns a new List array value, from data.
-func NewListData(data *Data) *List {
-	a := &List{}
-	a.refCount = 1
-	a.setData(data)
-	return a
-}
-
-func (a *List) ListValues() Interface { return a.values }
-
-func (a *List) String() string {
-	o := new(strings.Builder)
-	o.WriteString("[")
-	for i := 0; i < a.Len(); i++ {
-		if i > 0 {
-			o.WriteString(" ")
-		}
-		if !a.IsValid(i) {
-			o.WriteString("(null)")
-			continue
-		}
-		sub := a.newListValue(i)
-		fmt.Fprintf(o, "%v", sub)
-		sub.Release()
-	}
-	o.WriteString("]")
-	return o.String()
-}
-
-func (a *List) newListValue(i int) Interface {
-	j := i + a.array.data.offset
-	beg := int64(a.offsets[j])
-	end := int64(a.offsets[j+1])
-	return NewSlice(a.values, beg, end)
-}
-
-func (a *List) setData(data *Data) {
-	a.array.setData(data)
-	vals := data.buffers[1]
-	if vals != nil {
-		a.offsets = arrow.Int32Traits.CastFromBytes(vals.Bytes())
-	}
-	a.values = MakeFromData(data.childData[0])
-}
-
-func arrayEqualList(left, right *List) bool {
-	for i := 0; i < left.Len(); i++ {
-		if left.IsNull(i) {
-			continue
-		}
-		o := func() bool {
-			l := left.newListValue(i)
-			defer l.Release()
-			r := right.newListValue(i)
-			defer r.Release()
-			return ArrayEqual(l, r)
-		}()
-		if !o {
-			return false
-		}
-	}
-	return true
-}
-
-// Len returns the number of elements in the array.
-func (a *List) Len() int { return a.array.Len() }
-
-func (a *List) Offsets() []int32 { return a.offsets }
-
-func (a *List) Retain() {
-	a.array.Retain()
-	a.values.Retain()
-}
-
-func (a *List) Release() {
-	a.array.Release()
-	a.values.Release()
-}
-
-type ListBuilder struct {
-	builder
-
-	etype   arrow.DataType // data type of the list's elements.
-	values  Builder        // value builder for the list's elements.
-	offsets *Int32Builder
-}
-
-// NewListBuilder returns a builder, using the provided memory allocator.
-// The created builder produces lists whose elements are of type etype.
-func NewListBuilder(mem memory.Allocator, etype arrow.DataType) *ListBuilder {
-	return &ListBuilder{
-		builder: builder{refCount: 1, mem: mem},
-		etype:   etype,
-		values:  NewBuilder(mem, etype),
-		offsets: NewInt32Builder(mem),
-	}
-}
-
-// Release decreases the reference count by 1.
-// When the reference count goes to zero, the memory is freed.
-func (b *ListBuilder) Release() {
-	debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases")
-
-	if atomic.AddInt64(&b.refCount, -1) == 0 {
-		if b.nullBitmap != nil {
-			b.nullBitmap.Release()
-			b.nullBitmap = nil
-		}
-	}
-
-	b.values.Release()
-	b.offsets.Release()
-}
-
-func (b *ListBuilder) appendNextOffset() {
-	b.offsets.Append(int32(b.values.Len()))
-}
-
-func (b *ListBuilder) Append(v bool) {
-	b.Reserve(1)
-	b.unsafeAppendBoolToBitmap(v)
-	b.appendNextOffset()
-}
-
-func (b *ListBuilder) AppendNull() {
-	b.Reserve(1)
-	b.unsafeAppendBoolToBitmap(false)
-	b.appendNextOffset()
-}
-
-func (b *ListBuilder) AppendValues(offsets []int32, valid []bool) {
-	b.Reserve(len(valid))
-	b.offsets.AppendValues(offsets, nil)
-	b.builder.unsafeAppendBoolsToBitmap(valid, len(valid))
-}
-
-func (b *ListBuilder) unsafeAppend(v bool) {
-	bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	b.length++
-}
-
-func (b *ListBuilder) unsafeAppendBoolToBitmap(isValid bool) {
-	if isValid {
-		bitutil.SetBit(b.nullBitmap.Bytes(), b.length)
-	} else {
-		b.nulls++
-	}
-	b.length++
-}
-
-func (b *ListBuilder) init(capacity int) {
-	b.builder.init(capacity)
-	b.offsets.init(capacity + 1)
-}
-
-// Reserve ensures there is enough space for appending n elements
-// by checking the capacity and calling Resize if necessary.
-func (b *ListBuilder) Reserve(n int) {
-	b.builder.reserve(n, b.resizeHelper)
-	b.offsets.Reserve(n)
-}
-
-// Resize adjusts the space allocated by b to n elements. If n is greater than b.Cap(),
-// additional memory will be allocated. If n is smaller, the allocated memory may be reduced.
-func (b *ListBuilder) Resize(n int) {
-	b.resizeHelper(n)
-	b.offsets.Resize(n)
-}
-
-func (b *ListBuilder) resizeHelper(n int) {
-	if n < minBuilderCapacity {
-		n = minBuilderCapacity
-	}
-
... 103886 lines suppressed ...
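
The truncated list.go hunk above is the variable-length counterpart of FixedSizeList: list element i's children live at child positions offsets[j] through offsets[j+1], with j shifted by the array's slice offset, and appendNextOffset records the running child length on every Append. A standalone restatement of newListValue's bounds arithmetic (helper name hypothetical):

// listBounds returns the child-array range backing list element i,
// mirroring List.newListValue in the hunk above.
func listBounds(offsets []int32, arrayOffset, i int) (beg, end int64) {
	j := i + arrayOffset // account for sliced arrays
	return int64(offsets[j]), int64(offsets[j+1])
}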

[arrow-rs] 09/14: Removed docs.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 1030fc2b272c00f64eda550d76e1ff8a652dc269
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:21:46 2021 +0000

    Removed docs.
---
 docs/.gitignore                                    |   19 -
 docs/Makefile                                      |  247 ----
 docs/README.md                                     |   30 -
 docs/environment.yml                               |   25 -
 docs/make.bat                                      |   52 -
 docs/requirements.txt                              |    5 -
 docs/source/_static/arrow.png                      |  Bin 21636 -> 0 bytes
 docs/source/_static/favicon.ico                    |  Bin 15086 -> 0 bytes
 docs/source/_static/theme_overrides.css            |   83 --
 docs/source/_templates/docs-sidebar.html           |   19 -
 docs/source/_templates/layout.html                 |    5 -
 docs/source/conf.py                                |  444 -------
 docs/source/cpp/api.rst                            |   42 -
 docs/source/cpp/api/array.rst                      |   92 --
 docs/source/cpp/api/builder.rst                    |   56 -
 docs/source/cpp/api/c_abi.rst                      |   48 -
 docs/source/cpp/api/compute.rst                    |   56 -
 docs/source/cpp/api/cuda.rst                       |   74 --
 docs/source/cpp/api/dataset.rst                    |   71 --
 docs/source/cpp/api/datatype.rst                   |  151 ---
 docs/source/cpp/api/filesystem.rst                 |   64 --
 docs/source/cpp/api/flight.rst                     |  202 ----
 docs/source/cpp/api/formats.rst                    |   98 --
 docs/source/cpp/api/io.rst                         |   95 --
 docs/source/cpp/api/ipc.rst                        |   90 --
 docs/source/cpp/api/memory.rst                     |  124 --
 docs/source/cpp/api/scalar.rst                     |   38 -
 docs/source/cpp/api/support.rst                    |   57 -
 docs/source/cpp/api/table.rst                      |   45 -
 docs/source/cpp/api/tensor.rst                     |   57 -
 docs/source/cpp/api/utilities.rst                  |   52 -
 docs/source/cpp/arrays.rst                         |  214 ----
 docs/source/cpp/cmake.rst                          |   72 --
 docs/source/cpp/compute.rst                        |  833 --------------
 docs/source/cpp/conventions.rst                    |  107 --
 docs/source/cpp/csv.rst                            |  172 ---
 docs/source/cpp/dataset.rst                        |  403 -------
 docs/source/cpp/datatypes.rst                      |   68 --
 docs/source/cpp/examples/cmake_minimal_build.rst   |   28 -
 .../cpp/examples/dataset_documentation_example.rst |   27 -
 docs/source/cpp/examples/index.rst                 |   27 -
 .../cpp/examples/row_columnar_conversion.rst       |   27 -
 .../source/cpp/examples/tuple_range_conversion.rst |  106 --
 docs/source/cpp/flight.rst                         |  119 --
 docs/source/cpp/getting_started.rst                |   40 -
 docs/source/cpp/index.rst                          |   32 -
 docs/source/cpp/io.rst                             |   87 --
 docs/source/cpp/ipc.rst                            |   75 --
 docs/source/cpp/json.rst                           |  128 ---
 docs/source/cpp/memory.rst                         |  185 ---
 docs/source/cpp/overview.rst                       |   97 --
 docs/source/cpp/parquet.rst                        |  432 -------
 docs/source/cpp/tables.rst                         |   83 --
 docs/source/developers/archery.rst                 |   84 --
 docs/source/developers/benchmarks.rst              |  179 ---
 docs/source/developers/contributing.rst            |  360 ------
 docs/source/developers/cpp/building.rst            |  481 --------
 docs/source/developers/cpp/conventions.rst         |   90 --
 docs/source/developers/cpp/development.rst         |  293 -----
 docs/source/developers/cpp/fuzzing.rst             |   99 --
 docs/source/developers/cpp/index.rst               |   31 -
 docs/source/developers/cpp/windows.rst             |  416 -------
 docs/source/developers/crossbow.rst                |  257 -----
 docs/source/developers/docker.rst                  |  225 ----
 docs/source/developers/documentation.rst           |  103 --
 docs/source/developers/python.rst                  |  575 ---------
 docs/source/example.gz                             |  Bin 41 -> 0 bytes
 docs/source/format/Arrow.graffle                   |  Bin 4142 -> 0 bytes
 docs/source/format/Arrow.png                       |  Bin 112671 -> 0 bytes
 docs/source/format/CDataInterface.rst              |  945 ---------------
 docs/source/format/CStreamInterface.rst            |  218 ----
 docs/source/format/Columnar.rst                    | 1215 --------------------
 docs/source/format/Flight.rst                      |  152 ---
 docs/source/format/Guidelines.rst                  |   24 -
 docs/source/format/IPC.rst                         |   24 -
 docs/source/format/Integration.rst                 |  398 -------
 docs/source/format/Layout.rst                      |   24 -
 docs/source/format/Metadata.rst                    |   24 -
 docs/source/format/Other.rst                       |   63 -
 docs/source/format/README.md                       |   24 -
 docs/source/format/Versioning.rst                  |   70 --
 .../format/integration_json_examples/simple.json   |   98 --
 .../format/integration_json_examples/struct.json   |  201 ----
 docs/source/index.rst                              |   82 --
 docs/source/java/index.rst                         |   30 -
 docs/source/java/ipc.rst                           |  187 ---
 docs/source/java/vector.rst                        |  288 -----
 docs/source/java/vector_schema_root.rst            |   74 --
 docs/source/python/api.rst                         |   40 -
 docs/source/python/api/arrays.rst                  |  122 --
 docs/source/python/api/compute.rst                 |  211 ----
 docs/source/python/api/cuda.rst                    |   62 -
 docs/source/python/api/dataset.rst                 |   60 -
 docs/source/python/api/datatypes.rst               |  155 ---
 docs/source/python/api/files.rst                   |   65 --
 docs/source/python/api/filesystems.rst             |   53 -
 docs/source/python/api/flight.rst                  |   91 --
 docs/source/python/api/formats.rst                 |   97 --
 docs/source/python/api/ipc.rst                     |   68 --
 docs/source/python/api/memory.rst                  |   72 --
 docs/source/python/api/misc.rst                    |   40 -
 docs/source/python/api/plasma.rst                  |   33 -
 docs/source/python/api/tables.rst                  |   55 -
 docs/source/python/benchmarks.rst                  |   56 -
 docs/source/python/compute.rst                     |   55 -
 docs/source/python/csv.rst                         |  122 --
 docs/source/python/cuda.rst                        |  159 ---
 docs/source/python/data.rst                        |  433 -------
 docs/source/python/dataset.rst                     |  474 --------
 docs/source/python/extending.rst                   |  468 --------
 docs/source/python/extending_types.rst             |  325 ------
 docs/source/python/feather.rst                     |  109 --
 docs/source/python/filesystems.rst                 |  208 ----
 docs/source/python/filesystems_deprecated.rst      |   95 --
 docs/source/python/getting_involved.rst            |   35 -
 docs/source/python/index.rst                       |   57 -
 docs/source/python/install.rst                     |   90 --
 docs/source/python/ipc.rst                         |  316 -----
 docs/source/python/json.rst                        |  117 --
 docs/source/python/memory.rst                      |  298 -----
 docs/source/python/numpy.rst                       |   75 --
 docs/source/python/pandas.rst                      |  309 -----
 docs/source/python/parquet.rst                     |  590 ----------
 docs/source/python/plasma.rst                      |  464 --------
 docs/source/python/timestamps.rst                  |  198 ----
 docs/source/status.rst                             |  237 ----
 126 files changed, 19651 deletions(-)

diff --git a/docs/.gitignore b/docs/.gitignore
deleted file mode 100644
index d2e9f6c..0000000
--- a/docs/.gitignore
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-_build
-source/python/generated
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index e38bc91..0000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,247 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-
-# Do not fail the build if there are warnings
-# SPHINXOPTS    = -j8 -W
-SPHINXOPTS    = -j8
-
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = _build
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-
-.PHONY: help
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  applehelp  to make an Apple Help Book"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  epub3      to make an epub3"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  xml        to make Docutils-native XML files"
-	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-	@echo "  coverage   to run coverage check of the documentation (if enabled)"
-	@echo "  dummy      to check syntax errors of document sources"
-
-.PHONY: clean
-clean:
-	rm -rf $(BUILDDIR)/*
-
-.PHONY: html
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-.PHONY: dirhtml
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-.PHONY: singlehtml
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-.PHONY: pickle
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-.PHONY: json
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-.PHONY: htmlhelp
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-.PHONY: qthelp
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyarrow.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyarrow.qhc"
-
-.PHONY: applehelp
-applehelp:
-	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
-	@echo
-	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
-	@echo "N.B. You won't be able to view it unless you put it in" \
-	      "~/Library/Documentation/Help or install it in your application" \
-	      "bundle."
-
-.PHONY: devhelp
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/pyarrow"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyarrow"
-	@echo "# devhelp"
-
-.PHONY: epub
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-.PHONY: epub3
-epub3:
-	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
-	@echo
-	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
-
-.PHONY: latex
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-.PHONY: latexpdf
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-.PHONY: latexpdfja
-latexpdfja:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through platex and dvipdfmx..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-.PHONY: text
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-.PHONY: man
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-.PHONY: texinfo
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-.PHONY: info
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-.PHONY: gettext
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-.PHONY: changes
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-.PHONY: linkcheck
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-.PHONY: doctest
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
-
-.PHONY: coverage
-coverage:
-	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
-	@echo "Testing of coverage in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/coverage/python.txt."
-
-.PHONY: xml
-xml:
-	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
-	@echo
-	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-.PHONY: pseudoxml
-pseudoxml:
-	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
-	@echo
-	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
-
-.PHONY: dummy
-dummy:
-	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
-	@echo
-	@echo "Build finished. Dummy builder generates no files."
diff --git a/docs/README.md b/docs/README.md
deleted file mode 100644
index 2130426..0000000
--- a/docs/README.md
+++ /dev/null
@@ -1,30 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Apache Arrow Documentation
-
-This directory contains source files for building the main project
-documentation. This includes the [Arrow columnar format specification][2].
-
-Instructions for building the documentation site are found in
-[docs/source/developers/documentation.rst][1]. The build depends on the API
-documentation for some of the project subcomponents.
-
-[1]: https://github.com/apache/arrow/blob/master/docs/source/developers/documentation.rst
-[2]: https://github.com/apache/arrow/tree/master/docs/source/format
diff --git a/docs/environment.yml b/docs/environment.yml
deleted file mode 100644
index 8d1fe9b..0000000
--- a/docs/environment.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-channels:
-- defaults
-- conda-forge
-dependencies:
-- arrow-cpp
-- parquet-cpp
-- pyarrow
-- numpydoc
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index 36f2086..0000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,52 +0,0 @@
-@rem Licensed to the Apache Software Foundation (ASF) under one
-@rem or more contributor license agreements.  See the NOTICE file
-@rem distributed with this work for additional information
-@rem regarding copyright ownership.  The ASF licenses this file
-@rem to you under the Apache License, Version 2.0 (the
-@rem "License"); you may not use this file except in compliance
-@rem with the License.  You may obtain a copy of the License at
-@rem
-@rem   http://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing,
-@rem software distributed under the License is distributed on an
-@rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-@rem KIND, either express or implied.  See the License for the
-@rem specific language governing permissions and limitations
-@rem under the License.
-
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
-	set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=source
-set BUILDDIR=_build
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
-	echo.
-	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
-	echo.installed, then set the SPHINXBUILD environment variable to point
-	echo.to the full path of the 'sphinx-build' executable. Alternatively you
-	echo.may add the Sphinx directory to PATH.
-	echo.
-	echo.If you don't have Sphinx installed, grab it from
-	echo.http://sphinx-doc.org/
-	exit /b 1
-)
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
-
-:end
-popd
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index 0dbca69..0000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-breathe
-ipython
-numpydoc
-sphinx==2.4.4
-pydata-sphinx-theme
diff --git a/docs/source/_static/arrow.png b/docs/source/_static/arrow.png
deleted file mode 100644
index 72104b0..0000000
Binary files a/docs/source/_static/arrow.png and /dev/null differ
diff --git a/docs/source/_static/favicon.ico b/docs/source/_static/favicon.ico
deleted file mode 100644
index 33a554a..0000000
Binary files a/docs/source/_static/favicon.ico and /dev/null differ
diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css
deleted file mode 100644
index f623b3b..0000000
--- a/docs/source/_static/theme_overrides.css
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/* Customizing with theme CSS variables */
-
-:root {
-  --pst-color-active-navigation: 215, 70, 51;
-  --pst-color-link-hover: 215, 70, 51;
-  --pst-color-headerlink: 215, 70, 51;
-  /* Use normal text color (like h3, ..) instead of primary color */
-  --pst-color-h1: var(--color-text-base);
-  --pst-color-h2: var(--color-text-base);
-  /* Use softer blue from bootstrap's default info color */
-  --pst-color-info: 23, 162, 184;
-  --pst-header-height: 0px;
-}
-
-code {
-  color: rgb(215, 70, 51);
-}
-
-.footer {
-  text-align: center;
-}
-
-/* Ensure the logo is properly displayed */
-
-.navbar-brand {
-  height: auto;
-  width: auto;
-}
-
-a.navbar-brand img {
-  height: auto;
-  width: auto;
-  max-height: 15vh;
-  max-width: 100%;
-}
-
-
-/* Limit the max height of the sidebar navigation section. Because our
-customized template puts more content above the navigation (i.e. a
-larger logo), the navigation would overlap with the footer if we did
-not decrease the max-height.
-Details: min(15vh, 110px) for the logo size, 8rem for the search box etc. */
-
-@media (min-width:720px) {
-  @supports (position:-webkit-sticky) or (position:sticky) {
-    .bd-links {
-      max-height: calc(100vh - min(15vh, 110px) - 8rem)
-    }
-  }
-}
-
-
-/* Fix table text wrapping in RTD theme,
- * see https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html
- */
-
-@media screen {
-    table.docutils td {
-        /* !important prevents the common CSS stylesheets from overriding
-          this as on RTD they are loaded after this stylesheet */
-        white-space: normal !important;
-    }
-}
diff --git a/docs/source/_templates/docs-sidebar.html b/docs/source/_templates/docs-sidebar.html
deleted file mode 100644
index f6ee66c..0000000
--- a/docs/source/_templates/docs-sidebar.html
+++ /dev/null
@@ -1,19 +0,0 @@
-
-<a class="navbar-brand" href="{{ pathto(master_doc) }}">
-  <img src="{{ pathto('_static/' + logo, 1) }}" class="logo" alt="logo">
-</a>
-
-<form class="bd-search d-flex align-items-center" action="{{ pathto('search') }}" method="get">
-  <i class="icon fas fa-search"></i>
-  <input type="search" class="form-control" name="q" id="search-input" placeholder="{{ theme_search_bar_text }}" aria-label="{{ theme_search_bar_text }}" autocomplete="off" >
-</form>
-
-<nav class="bd-links" id="bd-docs-nav" aria-label="Main navigation">
-  <div class="bd-toc-item active">
-    {% if "python/api" in pagename or "python/generated" in pagename %}
-    {{ generate_nav_html("sidebar", startdepth=0, maxdepth=3, collapse=False, includehidden=True, titles_only=True) }}
-    {% else %}
-    {{ generate_nav_html("sidebar", startdepth=0, maxdepth=4, collapse=False, includehidden=True, titles_only=True) }}
-    {% endif %}
-  </div>
-</nav>
diff --git a/docs/source/_templates/layout.html b/docs/source/_templates/layout.html
deleted file mode 100644
index a9d0f30..0000000
--- a/docs/source/_templates/layout.html
+++ /dev/null
@@ -1,5 +0,0 @@
-{% extends "pydata_sphinx_theme/layout.html" %}
-
-{# Silence the navbar #}
-{% block docs_navbar %}
-{% endblock %}
diff --git a/docs/source/conf.py b/docs/source/conf.py
deleted file mode 100644
index 05a4553..0000000
--- a/docs/source/conf.py
+++ /dev/null
@@ -1,444 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-
-import os
-import sys
-from unittest import mock
-
-import pyarrow
-
-
-sys.path.extend([
-    os.path.join(os.path.dirname(__file__),
-                 '..', '../..')
-
-])
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.autosummary',
-    'sphinx.ext.doctest',
-    'sphinx.ext.ifconfig',
-    'sphinx.ext.mathjax',
-    'sphinx.ext.viewcode',
-    'sphinx.ext.napoleon',
-    'IPython.sphinxext.ipython_directive',
-    'IPython.sphinxext.ipython_console_highlighting',
-    'breathe'
-]
-
-# Show members for classes in .. autosummary
-autodoc_default_options = {
-    'members': None,
-    'undoc-members': None,
-    'show-inheritance': None,
-    'inherited-members': None
-}
-
-# Breathe configuration
-breathe_projects = {"arrow_cpp": "../../cpp/apidoc/xml"}
-breathe_default_project = "arrow_cpp"
-
-# Overridden conditionally below
-autodoc_mock_imports = []
-
-# ipython directive options
-ipython_mplbackend = ''
-
-# numpydoc configuration
-napoleon_use_rtype = False
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-
-source_suffix = ['.rst']
-
-autosummary_generate = True
-
-# The encoding of source files.
-#
-# source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'Apache Arrow'
-copyright = u'2016-2019 Apache Software Foundation'
-author = u'Apache Software Foundation'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = os.environ.get('ARROW_DOCS_VERSION',
-                         pyarrow.__version__)
-# The full version, including alpha/beta/rc tags.
-release = os.environ.get('ARROW_DOCS_VERSION',
-                         pyarrow.__version__)
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#
-# today = ''
-#
-# Else, today_fmt is used as the format for a strftime call.
-#
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# These patterns also affect html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-#
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = False
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-#
-html_theme = 'pydata_sphinx_theme'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-#
-html_theme_options = {
-    "show_toc_level": 2,
-    "google_analytics_id": "UA-107500873-1",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents.
-# "<project> v<release> documentation" by default.
-#
-html_title = u'Apache Arrow v{}'.format(version)
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-#
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#
-html_logo = "_static/arrow.png"
-
-# The name of an image file (relative to this directory) to use as a favicon of
-# the docs.  This file should be a Windows icon file (.ico) being 16x16 or
-# 32x32 pixels large.
-#
-html_favicon = "_static/favicon.ico"
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# Custom fixes to the RTD theme
-html_css_files = ['theme_overrides.css']
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-#
-# html_extra_path = []
-
-# If not None, a 'Last updated on:' timestamp is inserted at every page
-# bottom, using the given strftime format.
-# The empty string is equivalent to '%b %d, %Y'.
-#
-# html_last_updated_fmt = None
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#
-html_sidebars = {
-#    '**': ['sidebar-logo.html', 'sidebar-search-bs.html', 'sidebar-nav-bs.html'],
-    '**': ['docs-sidebar.html'],
-}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-#
-# html_domain_indices = True
-
-# If false, no index is generated.
-#
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-#
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
-#
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# 'ja' uses this config value.
-# 'zh' users can customize the `jieba` dictionary path.
-#
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-#
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'arrowdoc'
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-     # The paper size ('letterpaper' or 'a4paper').
-     #
-     # 'papersize': 'letterpaper',
-
-     # The font size ('10pt', '11pt' or '12pt').
-     #
-     # 'pointsize': '10pt',
-
-     # Additional stuff for the LaTeX preamble.
-     #
-     # 'preamble': '',
-
-     # Latex figure (float) alignment
-     #
-     # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-#  author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (master_doc, 'arrow.tex', u'Apache Arrow Documentation',
-     u'Apache Arrow Team', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-#
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#
-# latex_appendices = []
-
-# If false, will not define \strong, \code, \titleref, \crossref ... but only
-# \sphinxstrong, ..., \sphinxtitleref, ... This helps avoid clashes with
-# user-added packages.
-#
-# latex_keep_old_macro_names = True
-
-# If false, no module index is generated.
-#
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'arrow', u'Apache Arrow Documentation',
-     [author], 1)
-]
-
-# If true, show URL addresses after external links.
-#
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-    (master_doc, 'arrow', u'Apache Arrow Documentation',
-     author, 'Apache Arrow', 'One line description of project.',
-     'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-#
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#
-# texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#
-# texinfo_no_detailmenu = False
-
-
-# -- Customization --------------------------------------------------------
-
-# Conditional API doc generation
-
-# Sphinx has two features for conditional inclusion:
-# - The "only" directive
-#   https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#including-content-based-on-tags
-# - The "ifconfig" extension
-#   https://www.sphinx-doc.org/en/master/usage/extensions/ifconfig.html
-#
-# Both have issues, but "ifconfig" seems to work in this setting.
-
-try:
-    import pyarrow.cuda
-    cuda_enabled = True
-except ImportError:
-    cuda_enabled = False
-    # Mock pyarrow.cuda to avoid autodoc warnings.
-    # XXX I can't get autodoc_mock_imports to work, so mock manually instead
-    # (https://github.com/sphinx-doc/sphinx/issues/2174#issuecomment-453177550)
-    pyarrow.cuda = sys.modules['pyarrow.cuda'] = mock.Mock()
-
-try:
-    import pyarrow.flight
-    flight_enabled = True
-except ImportError:
-    flight_enabled = False
-    pyarrow.flight = sys.modules['pyarrow.flight'] = mock.Mock()
-
-
-def setup(app):
-    # Use a config value to indicate whether CUDA API docs can be generated.
-    # This will also rebuild appropriately when the value changes.
-    app.add_config_value('cuda_enabled', cuda_enabled, 'env')
-    app.add_config_value('flight_enabled', flight_enabled, 'env')
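
The conditional-mock trick at the end of conf.py is worth spelling out: when
an optional submodule is missing, a Mock is installed in sys.modules so that
autodoc can still import the documented names. A minimal generalized sketch
of the same technique (the helper name is hypothetical, not part of the
deleted file):

    import sys
    from unittest import mock

    def mock_if_missing(module_name):
        # Try the real import first; fall back to a Mock stub so that
        # Sphinx autodoc does not fail on optional extensions.
        try:
            __import__(module_name)
            return True
        except ImportError:
            parent, _, child = module_name.rpartition(".")
            stub = sys.modules[module_name] = mock.Mock()
            if parent and parent in sys.modules:
                setattr(sys.modules[parent], child, stub)
            return False

    cuda_enabled = mock_if_missing("pyarrow.cuda")
    flight_enabled = mock_if_missing("pyarrow.flight")
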
diff --git a/docs/source/cpp/api.rst b/docs/source/cpp/api.rst
deleted file mode 100644
index 3df16a1..0000000
--- a/docs/source/cpp/api.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-*************
-API Reference
-*************
-
-.. toctree::
-   :maxdepth: 3
-
-   api/support
-   api/memory
-   api/datatype
-   api/array
-   api/scalar
-   api/builder
-   api/table
-   api/c_abi
-   api/compute
-   api/tensor
-   api/utilities
-   api/io
-   api/ipc
-   api/formats
-   api/cuda
-   api/flight
-   api/filesystem
-   api/dataset
diff --git a/docs/source/cpp/api/array.rst b/docs/source/cpp/api/array.rst
deleted file mode 100644
index bb981d1..0000000
--- a/docs/source/cpp/api/array.rst
+++ /dev/null
@@ -1,92 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-======
-Arrays
-======
-
-.. doxygenclass:: arrow::Array
-   :project: arrow_cpp
-   :members:
-
-Concrete array subclasses
-=========================
-
-.. doxygenclass:: arrow::DictionaryArray
-   :project: arrow_cpp
-   :members:
-
-Non-nested
-----------
-
-.. doxygenclass:: arrow::FlatArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::NullArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::BinaryArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::StringArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::PrimitiveArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::BooleanArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::FixedSizeBinaryArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::Decimal128Array
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::NumericArray
-   :project: arrow_cpp
-   :members:
-
-Nested
-------
-
-.. doxygenclass:: arrow::UnionArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::ListArray
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::StructArray
-   :project: arrow_cpp
-   :members:
-
-Chunked Arrays
-==============
-
-.. doxygenclass:: arrow::ChunkedArray
-   :project: arrow_cpp
-   :members:
diff --git a/docs/source/cpp/api/builder.rst b/docs/source/cpp/api/builder.rst
deleted file mode 100644
index 9e6540a..0000000
--- a/docs/source/cpp/api/builder.rst
+++ /dev/null
@@ -1,56 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-==============
-Array Builders
-==============
-
-.. doxygenclass:: arrow::ArrayBuilder
-   :members:
-
-Concrete builder subclasses
-===========================
-
-.. doxygenclass:: arrow::NullBuilder
-   :members:
-
-.. doxygenclass:: arrow::BooleanBuilder
-   :members:
-
-.. doxygenclass:: arrow::NumericBuilder
-   :members:
-
-.. doxygenclass:: arrow::BinaryBuilder
-   :members:
-
-.. doxygenclass:: arrow::StringBuilder
-   :members:
-
-.. doxygenclass:: arrow::FixedSizeBinaryBuilder
-   :members:
-
-.. doxygenclass:: arrow::Decimal128Builder
-   :members:
-
-.. doxygenclass:: arrow::ListBuilder
-   :members:
-
-.. doxygenclass:: arrow::StructBuilder
-   :members:
-
-.. doxygenclass:: arrow::DictionaryBuilder
-   :members:
diff --git a/docs/source/cpp/api/c_abi.rst b/docs/source/cpp/api/c_abi.rst
deleted file mode 100644
index 4e451c3..0000000
--- a/docs/source/cpp/api/c_abi.rst
+++ /dev/null
@@ -1,48 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-============
-C Interfaces
-============
-
-.. seealso::
-   The :ref:`C data interface <c-data-interface>` and
-   :ref:`C stream interface <c-stream-interface>` specifications.
-
-ABI Structures
-==============
-
-.. doxygenstruct:: ArrowSchema
-   :project: arrow_cpp
-
-.. doxygenstruct:: ArrowArray
-   :project: arrow_cpp
-
-.. doxygenstruct:: ArrowArrayStream
-   :project: arrow_cpp
-
-C Data Interface
-================
-
-.. doxygengroup:: c-data-interface
-   :content-only:
-
-C Stream Interface
-==================
-
-.. doxygengroup:: c-stream-interface
-   :content-only:
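
The ABI structures above are plain C structs, so they can also be exercised
end-to-end from Python. A hedged round-trip sketch using pyarrow's bundled
cffi definitions (the private _export_to_c/_import_from_c entry points are
assumptions based on the pyarrow documentation of the period):

    import pyarrow as pa
    from pyarrow.cffi import ffi  # ships the ArrowSchema/ArrowArray struct definitions

    c_schema = ffi.new("struct ArrowSchema*")
    c_array = ffi.new("struct ArrowArray*")
    ptr_schema = int(ffi.cast("uintptr_t", c_schema))
    ptr_array = int(ffi.cast("uintptr_t", c_array))

    arr = pa.array([1, 2, 3])
    arr._export_to_c(ptr_array, ptr_schema)                # producer side
    back = pa.Array._import_from_c(ptr_array, ptr_schema)  # consumer side
    assert back.equals(arr)
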
diff --git a/docs/source/cpp/api/compute.rst b/docs/source/cpp/api/compute.rst
deleted file mode 100644
index 3b0a89f..0000000
--- a/docs/source/cpp/api/compute.rst
+++ /dev/null
@@ -1,56 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-Compute Functions
-=================
-
-Datum class
------------
-
-.. doxygenclass:: arrow::Datum
-   :members:
-
-Abstract Function classes
--------------------------
-
-.. doxygengroup:: compute-functions
-   :content-only:
-   :members:
-
-Function registry
------------------
-
-.. doxygenclass:: arrow::compute::FunctionRegistry
-   :members:
-
-.. doxygenfunction:: arrow::compute::GetFunctionRegistry
-
-Convenience functions
----------------------
-
-.. doxygengroup:: compute-call-function
-   :content-only:
-
-Concrete options classes
-------------------------
-
-.. doxygengroup:: compute-concrete-options
-   :content-only:
-   :members:
-   :undoc-members:
-
-.. TODO: List concrete function invocation shortcuts?
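
The registry design documented above (generic lookup by function name, plus
typed convenience wrappers) is mirrored by the Python bindings; a short
illustrative sketch, not taken from the deleted page:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array([1, 2, 3, None])

    total = pc.sum(arr)                    # typed convenience wrapper
    same = pc.call_function("sum", [arr])  # registry lookup, cf. arrow::compute::CallFunction
    assert total == same
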
diff --git a/docs/source/cpp/api/cuda.rst b/docs/source/cpp/api/cuda.rst
deleted file mode 100644
index caeb5be..0000000
--- a/docs/source/cpp/api/cuda.rst
+++ /dev/null
@@ -1,74 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-============
-CUDA support
-============
-
-Contexts
-========
-
-.. doxygenclass:: arrow::cuda::CudaDeviceManager
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::cuda::CudaContext
-   :project: arrow_cpp
-   :members:
-
-Devices
-=======
-
-.. doxygenclass:: arrow::cuda::CudaDevice
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::cuda::CudaMemoryManager
-   :project: arrow_cpp
-   :members:
-
-Buffers
-=======
-
-.. doxygenclass:: arrow::cuda::CudaBuffer
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::cuda::CudaHostBuffer
-   :project: arrow_cpp
-   :members:
-
-Memory Input / Output
-=====================
-
-.. doxygenclass:: arrow::cuda::CudaBufferReader
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::cuda::CudaBufferWriter
-   :project: arrow_cpp
-   :members:
-
-IPC
-===
-
-.. doxygenclass:: arrow::cuda::CudaIpcMemHandle
-   :project: arrow_cpp
-   :members:
-
-.. doxygengroup:: cuda-ipc-functions
-   :content-only:
diff --git a/docs/source/cpp/api/dataset.rst b/docs/source/cpp/api/dataset.rst
deleted file mode 100644
index 3f0df8a..0000000
--- a/docs/source/cpp/api/dataset.rst
+++ /dev/null
@@ -1,71 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-=======
-Dataset
-=======
-
-Interface
-=========
-
-.. doxygenclass:: arrow::dataset::Fragment
-   :members:
-
-.. doxygenclass:: arrow::dataset::Dataset
-   :members:
-
-Partitioning
-============
-
-.. doxygengroup:: dataset-partitioning
-   :content-only:
-   :members:
-
-Dataset discovery/factories
-===========================
-
-.. doxygengroup:: dataset-discovery
-   :content-only:
-   :members:
-
-Scanning
-========
-
-.. doxygengroup:: dataset-scanning
-   :content-only:
-   :members:
-
-Concrete implementations
-========================
-
-.. doxygengroup:: dataset-implementations
-   :content-only:
-   :members:
-
-File System Datasets
---------------------
-
-.. doxygengroup:: dataset-filesystem
-   :content-only:
-   :members:
-
-File Formats
-------------
-
-.. doxygengroup:: dataset-file-formats
-   :content-only:
-   :members:
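
The discovery/scanning split documented above carries over to the Python
bindings: a factory inspects the files and unifies their schema, then a scan
applies projection and filtering. A brief sketch (the directory path is
hypothetical):

    import pyarrow.dataset as ds

    # Discovery: the factory walks the directory and infers a unified schema
    dataset = ds.dataset("data/parquet_dir", format="parquet")

    # Scanning: push down a column projection and a row filter
    table = dataset.to_table(columns=["id", "value"],
                             filter=ds.field("value") > 0)
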
diff --git a/docs/source/cpp/api/datatype.rst b/docs/source/cpp/api/datatype.rst
deleted file mode 100644
index cc2bb71..0000000
--- a/docs/source/cpp/api/datatype.rst
+++ /dev/null
@@ -1,151 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-==========
-Data Types
-==========
-
-.. doxygenenum:: arrow::Type::type
-
-.. doxygenclass:: arrow::DataType
-   :members:
-
-.. _api-type-factories:
-
-Factory functions
-=================
-
-These functions are recommended for creating data types.  They may return
-new objects or existing singletons, depending on the type requested.
-
-.. doxygengroup:: type-factories
-   :project: arrow_cpp
-   :content-only:
-
-Concrete type subclasses
-========================
-
-Primitive
----------
-
-.. doxygenclass:: arrow::NullType
-   :members:
-
-.. doxygenclass:: arrow::BooleanType
-   :members:
-
-.. doxygenclass:: arrow::Int8Type
-   :members:
-
-.. doxygenclass:: arrow::Int16Type
-   :members:
-
-.. doxygenclass:: arrow::Int32Type
-   :members:
-
-.. doxygenclass:: arrow::Int64Type
-   :members:
-
-.. doxygenclass:: arrow::UInt8Type
-   :members:
-
-.. doxygenclass:: arrow::UInt16Type
-   :members:
-
-.. doxygenclass:: arrow::UInt32Type
-   :members:
-
-.. doxygenclass:: arrow::UInt64Type
-   :members:
-
-.. doxygenclass:: arrow::HalfFloatType
-   :members:
-
-.. doxygenclass:: arrow::FloatType
-   :members:
-
-.. doxygenclass:: arrow::DoubleType
-   :members:
-
-Time-related
-------------
-
-.. doxygenenum:: arrow::TimeUnit::type
-
-.. doxygenclass:: arrow::Date32Type
-   :members:
-
-.. doxygenclass:: arrow::Date64Type
-   :members:
-
-.. doxygenclass:: arrow::Time32Type
-   :members:
-
-.. doxygenclass:: arrow::Time64Type
-   :members:
-
-.. doxygenclass:: arrow::TimestampType
-   :members:
-
-Binary-like
------------
-
-.. doxygenclass:: arrow::BinaryType
-   :members:
-
-.. doxygenclass:: arrow::StringType
-   :members:
-
-.. doxygenclass:: arrow::FixedSizeBinaryType
-   :members:
-
-.. doxygenclass:: arrow::Decimal128Type
-   :members:
-
-Nested
-------
-
-.. doxygenclass:: arrow::ListType
-   :members:
-
-.. doxygenclass:: arrow::MapType
-   :members:
-
-.. doxygenclass:: arrow::StructType
-   :members:
-
-.. doxygenclass:: arrow::UnionType
-   :members:
-
-Dictionary-encoded
-------------------
-
-.. doxygenclass:: arrow::DictionaryType
-   :members:
-
-Fields and Schemas
-==================
-
-.. doxygengroup:: schema-factories
-   :project: arrow_cpp
-   :content-only:
-
-.. doxygenclass:: arrow::Field
-   :members:
-
-.. doxygenclass:: arrow::Schema
-   :members:
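
The factory-function note above (shared singletons for parameter-free types,
fresh objects for parameterized ones) has a direct counterpart in pyarrow; a
short sketch for illustration:

    import pyarrow as pa

    t = pa.int32()           # parameter-free: returns a cached singleton
    ts = pa.timestamp("ms")  # parameterized: constructed per call

    schema = pa.schema([
        pa.field("id", pa.int64()),
        pa.field("created", ts),
    ])
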
diff --git a/docs/source/cpp/api/filesystem.rst b/docs/source/cpp/api/filesystem.rst
deleted file mode 100644
index 02fff9a..0000000
--- a/docs/source/cpp/api/filesystem.rst
+++ /dev/null
@@ -1,64 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-===========
-Filesystems
-===========
-
-Interface
-=========
-
-.. doxygenenum:: arrow::fs::FileType
-
-.. doxygenstruct:: arrow::fs::FileInfo
-   :members:
-
-.. doxygenstruct:: arrow::fs::FileSelector
-   :members:
-
-.. doxygenclass:: arrow::fs::FileSystem
-   :members:
-
-High-level factory function
-===========================
-
-.. doxygengroup:: filesystem-factories
-   :content-only:
-
-Concrete implementations
-========================
-
-.. doxygenclass:: arrow::fs::SubTreeFileSystem
-   :members:
-
-.. doxygenstruct:: arrow::fs::LocalFileSystemOptions
-   :members:
-
-.. doxygenclass:: arrow::fs::LocalFileSystem
-   :members:
-
-.. doxygenstruct:: arrow::fs::S3Options
-   :members:
-
-.. doxygenclass:: arrow::fs::S3FileSystem
-   :members:
-
-.. doxygenstruct:: arrow::fs::HdfsOptions
-   :members:
-
-.. doxygenclass:: arrow::fs::HadoopFileSystem
-   :members:
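
The high-level factory function above corresponds to FileSystem.from_uri in
the Python bindings, which dispatches on the URI scheme to a concrete
implementation. A minimal hedged sketch (the bucket name is hypothetical):

    from pyarrow import fs

    filesystem, path = fs.FileSystem.from_uri("s3://my-bucket/data")
    info = filesystem.get_file_info(path)  # FileInfo for a single path
    print(info.type, info.path)
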
diff --git a/docs/source/cpp/api/flight.rst b/docs/source/cpp/api/flight.rst
deleted file mode 100644
index 7cefd66..0000000
--- a/docs/source/cpp/api/flight.rst
+++ /dev/null
@@ -1,202 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-================
-Arrow Flight RPC
-================
-
-.. note:: Flight is currently unstable. APIs are subject to change,
-          though we don't expect drastic changes.
-
-Common Types
-============
-
-.. doxygenstruct:: arrow::flight::Action
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::ActionType
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::AddCallHeaders
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::CallInfo
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::Criteria
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::FlightDescriptor
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::FlightEndpoint
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightInfo
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::FlightPayload
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightListing
-   :project: arrow_cpp
-   :members:
-
-.. doxygenenum:: arrow::flight::FlightMethod
-   :project: arrow_cpp
-
-.. doxygenstruct:: arrow::flight::Location
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::MetadataRecordBatchReader
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::Result
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ResultStream
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::Ticket
-   :project: arrow_cpp
-   :members:
-
-Clients
-=======
-
-.. doxygenclass:: arrow::flight::FlightClient
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::FlightClientOptions
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightCallOptions
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ClientAuthHandler
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ClientMiddleware
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ClientMiddlewareFactory
-   :project: arrow_cpp
-   :members:
-
-.. doxygentypedef:: arrow::flight::TimeoutDuration
-   :project: arrow_cpp
-
-.. doxygenclass:: arrow::flight::FlightStreamReader
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightStreamWriter
-   :project: arrow_cpp
-   :members:
-
-Servers
-=======
-
-.. doxygenclass:: arrow::flight::FlightServerBase
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightServerOptions
-   :project: arrow_cpp
-   :members:
-
-.. doxygenstruct:: arrow::flight::CertKeyPair
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightDataStream
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightMessageReader
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::FlightMetadataWriter
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::RecordBatchStream
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ServerAuthHandler
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ServerCallContext
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ServerMiddleware
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::ServerMiddlewareFactory
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::SimpleFlightListing
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::flight::SimpleResultStream
-   :project: arrow_cpp
-   :members:
-
-Error Handling
-==============
-
-Error handling uses the normal :class:`arrow::Status` class, combined
-with a custom :class:`arrow::StatusDetail` object for Flight-specific
-error codes.
-
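-For example, a sketch producing a Flight-specific error status with the
-:func:`MakeFlightError` factory documented below::
-
-   arrow::Status st = arrow::flight::MakeFlightError(
-       arrow::flight::FlightStatusCode::Unavailable, "server going down");
-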
-.. doxygenenum:: arrow::flight::FlightStatusCode
-   :project: arrow_cpp
-
-.. doxygenclass:: arrow::flight::FlightStatusDetail
-   :project: arrow_cpp
-   :members:
-
-.. doxygenfunction:: arrow::flight::MakeFlightError
-   :project: arrow_cpp
diff --git a/docs/source/cpp/api/formats.rst b/docs/source/cpp/api/formats.rst
deleted file mode 100644
index a072f11..0000000
--- a/docs/source/cpp/api/formats.rst
+++ /dev/null
@@ -1,98 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-============
-File Formats
-============
-
-.. _cpp-api-csv:
-
-CSV
-===
-
-.. doxygenstruct:: arrow::csv::ReadOptions
-   :members:
-
-.. doxygenstruct:: arrow::csv::ParseOptions
-   :members:
-
-.. doxygenstruct:: arrow::csv::ConvertOptions
-   :members:
-
-.. doxygenclass:: arrow::csv::TableReader
-   :members:
-
-.. _cpp-api-json:
-
-Line-separated JSON
-===================
-
-.. doxygenenum:: arrow::json::UnexpectedFieldBehavior
-
-.. doxygenstruct:: arrow::json::ReadOptions
-   :members:
-
-.. doxygenstruct:: arrow::json::ParseOptions
-   :members:
-
-.. doxygenclass:: arrow::json::TableReader
-   :members:
-
-.. _cpp-api-parquet:
-
-Parquet reader
-==============
-
-.. doxygenclass:: parquet::ReaderProperties
-   :members:
-
-.. doxygenclass:: parquet::ArrowReaderProperties
-   :members:
-
-.. doxygenclass:: parquet::ParquetFileReader
-   :members:
-
-.. doxygenclass:: parquet::arrow::FileReader
-   :members:
-
-.. doxygenclass:: parquet::arrow::FileReaderBuilder
-   :members:
-
-.. doxygengroup:: parquet-arrow-reader-factories
-   :content-only:
-
-.. doxygenclass:: parquet::StreamReader
-   :members:
-
-Parquet writer
-==============
-
-.. doxygenclass:: parquet::WriterProperties
-   :members:
-
-.. doxygenclass:: parquet::ArrowWriterProperties
-   :members:
-
-.. doxygenclass:: parquet::arrow::FileWriter
-   :members:
-
-.. doxygenfunction:: parquet::arrow::WriteTable
-
-.. doxygenclass:: parquet::StreamWriter
-   :members:
-
-.. TODO ORC
diff --git a/docs/source/cpp/api/io.rst b/docs/source/cpp/api/io.rst
deleted file mode 100644
index 735136a..0000000
--- a/docs/source/cpp/api/io.rst
+++ /dev/null
@@ -1,95 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-==============
-Input / output
-==============
-
-Interfaces
-==========
-
-.. doxygenclass:: arrow::io::FileInterface
-   :members:
-
-.. doxygenclass:: arrow::io::Readable
-   :members:
-
-.. doxygenclass:: arrow::io::Seekable
-   :members:
-
-.. doxygenclass:: arrow::io::Writable
-   :members:
-
-.. doxygenclass:: arrow::io::InputStream
-   :members:
-
-.. doxygenclass:: arrow::io::RandomAccessFile
-   :members:
-
-.. doxygenclass:: arrow::io::OutputStream
-   :members:
-
-.. doxygenclass:: arrow::io::ReadWriteFileInterface
-   :members:
-
-Concrete implementations
-========================
-
-In-memory streams
------------------
-
-.. doxygenclass:: arrow::io::BufferReader
-   :members:
-
-.. doxygenclass:: arrow::io::MockOutputStream
-   :members:
-
-.. doxygenclass:: arrow::io::BufferOutputStream
-   :members:
-
-.. doxygenclass:: arrow::io::FixedSizeBufferWriter
-   :members:
-
-Local files
------------
-
-.. doxygenclass:: arrow::io::ReadableFile
-   :members:
-
-.. doxygenclass:: arrow::io::FileOutputStream
-   :members:
-
-.. doxygenclass:: arrow::io::MemoryMappedFile
-   :members:
-
-Buffering input / output wrappers
----------------------------------
-
-.. doxygenclass:: arrow::io::BufferedInputStream
-   :members:
-
-.. doxygenclass:: arrow::io::BufferedOutputStream
-   :members:
-
-Compressed input / output wrappers
-----------------------------------
-
-.. doxygenclass:: arrow::io::CompressedInputStream
-   :members:
-
-.. doxygenclass:: arrow::io::CompressedOutputStream
-   :members:
diff --git a/docs/source/cpp/api/ipc.rst b/docs/source/cpp/api/ipc.rst
deleted file mode 100644
index 6822b98..0000000
--- a/docs/source/cpp/api/ipc.rst
+++ /dev/null
@@ -1,90 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-=========
-Arrow IPC
-=========
-
-IPC options
-===========
-
-.. doxygenstruct:: arrow::ipc::IpcReadOptions
-   :members:
-
-.. doxygenstruct:: arrow::ipc::IpcWriteOptions
-   :members:
-
-Reading IPC streams and files
-=============================
-
-Blocking API
-------------
-
-Use either of these two classes, depending on which IPC format you want
-to read.  The file format requires a random-access file, while the stream
-format only requires a sequential input stream.
-
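-For example, a sketch opening the file format (``data.arrow`` is a
-hypothetical path; the stream format is analogous via
-:class:`~arrow::ipc::RecordBatchStreamReader`)::
-
-   ARROW_ASSIGN_OR_RAISE(auto infile,
-                         arrow::io::ReadableFile::Open("data.arrow"));
-   ARROW_ASSIGN_OR_RAISE(auto reader,
-                         arrow::ipc::RecordBatchFileReader::Open(infile));
-   // Individual batches can then be read with reader->ReadRecordBatch(i)
-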
-.. doxygenclass:: arrow::ipc::RecordBatchStreamReader
-   :members:
-
-.. doxygenclass:: arrow::ipc::RecordBatchFileReader
-   :members:
-
-Event-driven API
-----------------
-
-To read an IPC stream in event-driven fashion, you must implement a
-:class:`~arrow::ipc::Listener` subclass that you will pass to
-:class:`~arrow::ipc::StreamDecoder`.
-
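-A minimal sketch, assuming ``buffer`` holds some incoming stream bytes
-(only :func:`~arrow::ipc::Listener::OnRecordBatchDecoded` is overridden
-here; see the class references below for the full interface)::
-
-   class CollectingListener : public arrow::ipc::Listener {
-    public:
-     arrow::Status OnRecordBatchDecoded(
-         std::shared_ptr<arrow::RecordBatch> batch) override {
-       // Collect each record batch as soon as it is decoded
-       batches_.push_back(std::move(batch));
-       return arrow::Status::OK();
-     }
-     std::vector<std::shared_ptr<arrow::RecordBatch>> batches_;
-   };
-
-   auto listener = std::make_shared<CollectingListener>();
-   arrow::ipc::StreamDecoder decoder(listener);
-   // Feed IPC stream bytes in arbitrary chunks as they arrive
-   ARROW_RETURN_NOT_OK(decoder.Consume(buffer));
-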
-.. doxygenclass:: arrow::ipc::Listener
-   :members:
-
-.. doxygenclass:: arrow::ipc::StreamDecoder
-   :members:
-
-Statistics
-----------
-
-.. doxygenstruct:: arrow::ipc::ReadStats
-   :members:
-
-Writing IPC streams and files
-=============================
-
-Blocking API
-------------
-
-The IPC stream format is only optionally terminated, whereas the IPC file format
-must include a terminating footer. Thus a writer of the IPC file format must be
-explicitly finalized with :func:`~arrow::ipc::RecordBatchWriter::Close()` or the resulting
-file will be corrupt.
-
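-For example, a sketch writing a (hypothetical) ``table`` in the file
-format and finalizing it::
-
-   ARROW_ASSIGN_OR_RAISE(auto outfile,
-                         arrow::io::FileOutputStream::Open("data.arrow"));
-   ARROW_ASSIGN_OR_RAISE(auto writer,
-                         arrow::ipc::MakeFileWriter(outfile, table->schema()));
-   ARROW_RETURN_NOT_OK(writer->WriteTable(*table));
-   // Without this call the footer is never written and the file is corrupt
-   ARROW_RETURN_NOT_OK(writer->Close());
-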
-.. doxygengroup:: record-batch-writer-factories
-   :content-only:
-
-.. doxygenclass:: arrow::ipc::RecordBatchWriter
-   :members:
-
-Statistics
-----------
-
-.. doxygenstruct:: arrow::ipc::WriteStats
-   :members:
diff --git a/docs/source/cpp/api/memory.rst b/docs/source/cpp/api/memory.rst
deleted file mode 100644
index 807a4e2..0000000
--- a/docs/source/cpp/api/memory.rst
+++ /dev/null
@@ -1,124 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-Memory (management)
-===================
-
-Devices
--------
-
-.. doxygenclass:: arrow::Device
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::CPUDevice
-   :project: arrow_cpp
-   :members:
-
-.. doxygenfunction:: arrow::default_cpu_memory_manager
-   :project: arrow_cpp
-
-Memory Managers
----------------
-
-.. doxygenclass:: arrow::MemoryManager
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::CPUMemoryManager
-   :project: arrow_cpp
-   :members:
-
-Buffers
--------
-
-.. doxygenclass:: arrow::Buffer
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::MutableBuffer
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::ResizableBuffer
-   :project: arrow_cpp
-   :members:
-
-Memory Pools
-------------
-
-.. doxygenfunction:: arrow::default_memory_pool
-   :project: arrow_cpp
-
-.. doxygenfunction:: arrow::jemalloc_memory_pool
-   :project: arrow_cpp
-
-.. doxygenfunction:: arrow::mimalloc_memory_pool
-   :project: arrow_cpp
-
-.. doxygenfunction:: arrow::system_memory_pool
-   :project: arrow_cpp
-
-.. doxygenclass:: arrow::MemoryPool
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::LoggingMemoryPool
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::ProxyMemoryPool
-   :project: arrow_cpp
-   :members:
-
-Allocation Functions
---------------------
-
-These functions allocate a buffer from a particular memory pool.
-
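-For example, a sketch allocating from the default pool
-(:func:`arrow::AllocateBuffer` is one of the factories documented below)::
-
-   ARROW_ASSIGN_OR_RAISE(std::unique_ptr<arrow::Buffer> buffer,
-                         arrow::AllocateBuffer(4096));
-   // buffer->mutable_data() now points to 4096 writable bytes
-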
-.. doxygengroup:: buffer-allocation-functions
-   :project: arrow_cpp
-   :content-only:
-
-Slicing
--------
-
-.. doxygengroup:: buffer-slicing-functions
-   :project: arrow_cpp
-   :content-only:
-
-Buffer Builders
----------------
-
-.. doxygenclass:: arrow::BufferBuilder
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::TypedBufferBuilder
-   :project: arrow_cpp
-   :members:
-
-STL Integration
----------------
-
-.. doxygenclass:: arrow::stl::allocator
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::stl::STLMemoryPool
-   :project: arrow_cpp
-   :members:
diff --git a/docs/source/cpp/api/scalar.rst b/docs/source/cpp/api/scalar.rst
deleted file mode 100644
index 391c9d5..0000000
--- a/docs/source/cpp/api/scalar.rst
+++ /dev/null
@@ -1,38 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-=======
-Scalars
-=======
-
-.. doxygenstruct:: arrow::Scalar
-   :project: arrow_cpp
-   :members:
-
-Factory functions
-=================
-
-.. doxygengroup:: scalar-factories
-   :content-only:
-
-Concrete scalar subclasses
-==========================
-
-.. doxygengroup:: concrete-scalar-classes
-   :content-only:
-   :members:
-   :undoc-members:
diff --git a/docs/source/cpp/api/support.rst b/docs/source/cpp/api/support.rst
deleted file mode 100644
index c3310e5..0000000
--- a/docs/source/cpp/api/support.rst
+++ /dev/null
@@ -1,57 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-===================
-Programming Support
-===================
-
-General information
--------------------
-
-.. doxygenfunction:: arrow::GetBuildInfo
-   :project: arrow_cpp
-
-.. doxygenstruct:: arrow::BuildInfo
-   :project: arrow_cpp
-   :members:
-
-Error return and reporting
---------------------------
-
-.. doxygenclass:: arrow::Status
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::StatusDetail
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::Result
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: parquet::ParquetException
-   :project: arrow_cpp
-   :members:
-
-.. doxygendefine:: ARROW_RETURN_NOT_OK
-
-.. doxygendefine:: ARROW_ASSIGN_OR_RAISE
-
-.. doxygendefine:: PARQUET_THROW_NOT_OK
-
-.. doxygendefine:: PARQUET_ASSIGN_OR_THROW
diff --git a/docs/source/cpp/api/table.rst b/docs/source/cpp/api/table.rst
deleted file mode 100644
index 53e2d72..0000000
--- a/docs/source/cpp/api/table.rst
+++ /dev/null
@@ -1,45 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-========================
-Two-dimensional Datasets
-========================
-
-Record Batches
-==============
-
-.. doxygenclass:: arrow::RecordBatch
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::RecordBatchReader
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::TableBatchReader
-   :project: arrow_cpp
-   :members:
-
-Tables
-======
-
-.. doxygenclass:: arrow::Table
-   :project: arrow_cpp
-   :members:
-
-.. doxygenfunction:: arrow::ConcatenateTables
-   :project: arrow_cpp
diff --git a/docs/source/cpp/api/tensor.rst b/docs/source/cpp/api/tensor.rst
deleted file mode 100644
index 1d51786..0000000
--- a/docs/source/cpp/api/tensor.rst
+++ /dev/null
@@ -1,57 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-=======
-Tensors
-=======
-
-Dense Tensors
-=============
-
-.. doxygenclass:: arrow::Tensor
-   :members:
-
-.. doxygenclass:: arrow::NumericTensor
-   :members:
-
-Sparse Tensors
-==============
-
-.. doxygenenum:: arrow::SparseTensorFormat::type
-
-.. doxygenclass:: arrow::SparseIndex
-   :members:
-
-.. doxygenclass:: arrow::SparseCOOIndex
-   :members:
-
-.. doxygenclass:: arrow::SparseCSRIndex
-   :members:
-
-.. doxygenclass:: arrow::SparseTensor
-   :members:
-
-.. doxygenclass:: arrow::SparseTensorImpl
-   :members:
-
-.. doxygentypedef:: arrow::SparseCOOTensor
-
-.. doxygentypedef:: arrow::SparseCSCMatrix
-
-.. doxygentypedef:: arrow::SparseCSFTensor
-
-.. doxygentypedef:: arrow::SparseCSRMatrix
diff --git a/docs/source/cpp/api/utilities.rst b/docs/source/cpp/api/utilities.rst
deleted file mode 100644
index 87c5a3b..0000000
--- a/docs/source/cpp/api/utilities.rst
+++ /dev/null
@@ -1,52 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-=========
-Utilities
-=========
-
-Decimal Numbers
-===============
-
-.. doxygenclass:: arrow::Decimal128
-   :project: arrow_cpp
-   :members:
-
-Abstract Sequences
-==================
-
-.. doxygenclass:: arrow::Iterator
-   :project: arrow_cpp
-   :members:
-
-.. doxygenclass:: arrow::VectorIterator
-   :project: arrow_cpp
-   :members:
-
-Compression
-===========
-
-.. doxygenenum:: arrow::Compression::type
-
-.. doxygenclass:: arrow::util::Codec
-   :members:
-
-.. doxygenclass:: arrow::util::Compressor
-   :members:
-
-.. doxygenclass:: arrow::util::Decompressor
-   :members:
diff --git a/docs/source/cpp/arrays.rst b/docs/source/cpp/arrays.rst
deleted file mode 100644
index bd6ba64..0000000
--- a/docs/source/cpp/arrays.rst
+++ /dev/null
@@ -1,214 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-======
-Arrays
-======
-
-.. seealso::
-   :doc:`Array API reference <api/array>`
-
-The central type in Arrow is the class :class:`arrow::Array`.   An array
-represents a known-length sequence of values all having the same type.
-Internally, those values are represented by one or several buffers, the
-number and meaning of which depend on the array's data type, as documented
-in :doc:`the Arrow data layout specification <../format/Layout>`.
-
-Those buffers consist of the value data itself and an optional bitmap buffer
-that indicates which array entries are null values.  The bitmap buffer
-can be entirely omitted if the array is known to have zero null values.
-
-There are concrete subclasses of :class:`arrow::Array` for each data type
-that help you access individual values of the array.
-
-Building an array
-=================
-
-As Arrow objects are immutable, there are classes provided that help you
-build these objects incrementally from third-party data.  These classes
-are organized in a hierarchy around the :class:`arrow::ArrayBuilder` base class,
-with concrete subclasses tailored for each particular data type.
-
-For example, to build an array of ``int64_t`` elements, we can use the
-:class:`arrow::Int64Builder` class. In the following example, we build an array
-of the range 1 to 8 where the element that should hold the value 4 is nulled::
-
-   arrow::Int64Builder builder;
-   builder.Append(1);
-   builder.Append(2);
-   builder.Append(3);
-   builder.AppendNull();
-   builder.Append(5);
-   builder.Append(6);
-   builder.Append(7);
-   builder.Append(8);
-
-   std::shared_ptr<arrow::Array> array;
-   arrow::Status st = builder.Finish(&array);
-   if (!st.ok()) {
-      // ... do something on array building failure
-   }
-
-The resulting Array (which can be cast to the concrete :class:`arrow::Int64Array`
-subclass if you want to access its values) then consists of two
-:class:`arrow::Buffer`\s.
-The first buffer holds the null bitmap, which consists here of a single byte with
-the bits ``1|1|1|1|0|1|1|1``. As we use `least-significant bit (LSB) numbering`_,
-this indicates that the fourth entry in the array is null. The second
-buffer is simply an ``int64_t`` array containing all the above values.
-As the fourth entry is null, the value at that position in the buffer is
-undefined.
-
-Here is how you could access the concrete array's contents::
-
-   // Cast the Array to its actual type to access its data
-   auto int64_array = std::static_pointer_cast<arrow::Int64Array>(array);
-
-   // Get the pointer to the null bitmap.
-   const uint8_t* null_bitmap = int64_array->null_bitmap_data();
-
-   // Get the pointer to the actual data
-   const int64_t* data = int64_array->raw_values();
-
-   // Alternatively, given an array index, query its null bit and value directly
-   int64_t index = 2;
-   if (!int64_array->IsNull(index)) {
-      int64_t value = int64_array->Value(index);
-   }
-
-.. note::
-   :class:`arrow::Int64Array` (respectively :class:`arrow::Int64Builder`) is
-   just a ``typedef``, provided for convenience, of ``arrow::NumericArray<Int64Type>``
-   (respectively ``arrow::NumericBuilder<Int64Type>``).
-
-.. _least-significant bit (LSB) numbering: https://en.wikipedia.org/wiki/Bit_numbering
-
-Performance
------------
-
-While it is possible to build an array value-by-value as in the example above,
-to attain the highest performance it is recommended to use the bulk appending
-methods (usually named ``AppendValues``) in the concrete :class:`arrow::ArrayBuilder`
-subclasses.
-
-If you know the number of elements in advance, it is also recommended to
-presize the working area by calling the :func:`~arrow::ArrayBuilder::Resize`
-or :func:`~arrow::ArrayBuilder::Reserve` methods.
-
-Here is how one could rewrite the above example to take advantage of those
-APIs::
-
-   arrow::Int64Builder builder;
-   // Make place for 8 values in total
-   builder.Resize(8);
-   // Bulk append the given values (with a null in 4th place as indicated by the
-   // validity vector)
-   std::vector<bool> validity = {true, true, true, false, true, true, true, true};
-   std::vector<int64_t> values = {1, 2, 3, 0, 5, 6, 7, 8};
-   builder.AppendValues(values, validity);
-
-   std::shared_ptr<arrow::Array> array;
-   arrow::Status st = builder.Finish(&array);
-
-If you still must append values one by one, some concrete builder subclasses
-have methods marked "Unsafe" that assume the working area has been correctly
-presized, and offer higher performance in exchange::
-
-   arrow::Int64Builder builder;
-   // Make place for 8 values in total
-   builder.Resize(8);
-   builder.UnsafeAppend(1);
-   builder.UnsafeAppend(2);
-   builder.UnsafeAppend(3);
-   builder.UnsafeAppendNull();
-   builder.UnsafeAppend(5);
-   builder.UnsafeAppend(6);
-   builder.UnsafeAppend(7);
-   builder.UnsafeAppend(8);
-
-   std::shared_ptr<arrow::Array> array;
-   arrow::Status st = builder.Finish(&array);
-
-
-Size Limitations and Recommendations
-====================================
-
-Some array types are structurally limited to 32-bit sizes.  This is the case
-at least for list arrays (which can hold up to 2^31 elements) and for string
-and binary arrays (which can hold up to 2GB of binary data).  Some other array
-types can hold up to 2^63 elements in the C++ implementation, but other Arrow
-implementations can have a 32-bit size limitation for those array types as well.
-
-For these reasons, it is recommended that huge data be chunked in subsets of
-more reasonable size.
-
-Chunked Arrays
-==============
-
-A :class:`arrow::ChunkedArray` is, like an array, a logical sequence of values;
-but unlike a simple array, a chunked array does not require the entire sequence
-to be physically contiguous in memory.  Also, the constituents of a chunked array
-need not have the same size, but they must all have the same data type.
-
-A chunked array is constructed by aggregating any number of arrays.  Here we'll
-build a chunked array with the same logical values as in the example above,
-but in two separate chunks::
-
-   std::vector<std::shared_ptr<arrow::Array>> chunks;
-   std::shared_ptr<arrow::Array> array;
-
-   // Build first chunk
-   arrow::Int64Builder builder;
-   builder.Append(1);
-   builder.Append(2);
-   builder.Append(3);
-   if (!builder.Finish(&array).ok()) {
-      // ... do something on array building failure
-   }
-   chunks.push_back(std::move(array));
-
-   // Build second chunk
-   builder.Reset();
-   builder.AppendNull();
-   builder.Append(5);
-   builder.Append(6);
-   builder.Append(7);
-   builder.Append(8);
-   if (!builder.Finish(&array).ok()) {
-      // ... do something on array building failure
-   }
-   chunks.push_back(std::move(array));
-
-   auto chunked_array = std::make_shared<arrow::ChunkedArray>(std::move(chunks));
-
-   assert(chunked_array->num_chunks() == 2);
-   // Logical length in number of values
-   assert(chunked_array->length() == 8);
-   assert(chunked_array->null_count() == 1);
-
-Slicing
-=======
-
-Like for physical memory buffers, it is possible to make zero-copy slices
-of arrays and chunked arrays, to obtain an array or chunked array referring
-to some logical subsequence of the data.  This is done by calling the
-:func:`arrow::Array::Slice` and :func:`arrow::ChunkedArray::Slice` methods,
-respectively.
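-
-For example, a sketch taking zero-copy views of three values starting at
-offset 2 (the slices share the parent's buffers)::
-
-   std::shared_ptr<arrow::Array> array_slice = array->Slice(2, 3);
-   auto chunked_slice = chunked_array->Slice(2, 3);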
-
diff --git a/docs/source/cpp/cmake.rst b/docs/source/cpp/cmake.rst
deleted file mode 100644
index f192988..0000000
--- a/docs/source/cpp/cmake.rst
+++ /dev/null
@@ -1,72 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-Using Arrow C++ in your own project
-===================================
-
-This section assumes you already have the Arrow C++ libraries on your
-system, either after installing them using a package manager or after
-:ref:`building them yourself <building-arrow-cpp>`.
-
-The recommended way to integrate the Arrow C++ libraries in your own C++
-project is to use CMake's
-`find_package <https://cmake.org/cmake/help/latest/command/find_package.html>`_
-function for locating and integrating dependencies.
-
-Basic usage
------------
-
-This minimal ``CMakeLists.txt`` file compiles a ``my_example.cc`` source
-file into an executable linked with the Arrow C++ shared library:
-
-.. code-block:: cmake
-
-   project(MyExample)
-
-   find_package(Arrow REQUIRED)
-
-   add_executable(my_example my_example.cc)
-   target_link_libraries(my_example PRIVATE arrow_shared)
-
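-A matching ``my_example.cc`` could be as small as this sketch, which
-merely prints the Arrow version it was linked against::
-
-   #include <arrow/config.h>
-
-   #include <iostream>
-
-   int main() {
-     std::cout << arrow::GetBuildInfo().version_string << std::endl;
-     return 0;
-   }
-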
-Available variables and targets
--------------------------------
-
-The directive ``find_package(Arrow REQUIRED)`` asks CMake to find an Arrow
-C++ installation on your system.  When it returns, it will have set a few
-CMake variables:
-
-* ``${Arrow_FOUND}`` is true if the Arrow C++ libraries have been found
-* ``${ARROW_VERSION}`` contains the Arrow version string
-* ``${ARROW_FULL_SO_VERSION}`` contains the Arrow DLL version string
-
-In addition, it will have created some targets that you can link against
-(note these are plain strings, not variables):
-
-* ``arrow_shared`` links to the Arrow shared libraries
-* ``arrow_static`` links to the Arrow static libraries
-
-In most cases, it is recommended to use the Arrow shared libraries.
-
-.. note::
-   CMake is case-sensitive.  The names and variables listed above have to be
-   spelt exactly that way!
-
-.. seealso::
-   A Docker-based :doc:`minimal build example <examples/cmake_minimal_build>`.
diff --git a/docs/source/cpp/compute.rst b/docs/source/cpp/compute.rst
deleted file mode 100644
index 92ac888..0000000
--- a/docs/source/cpp/compute.rst
+++ /dev/null
@@ -1,833 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-.. cpp:namespace:: arrow::compute
-
-=================
-Compute Functions
-=================
-
-The generic Compute API
-=======================
-
-.. TODO: describe API and how to invoke compute functions
-
-Functions and function registry
--------------------------------
-
-Functions represent compute operations over inputs of possibly varying
-types.  Internally, a function is implemented by one or several
-"kernels", depending on the concrete input types (for example, a function
-adding values from two inputs can have different kernels depending on
-whether the inputs are integral or floating-point).
-
-Functions are stored in a global :class:`FunctionRegistry` where
-they can be looked up by name.
-
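-For example, a sketch looking up the ``add`` function in the default
-registry::
-
-   ARROW_ASSIGN_OR_RAISE(
-       std::shared_ptr<arrow::compute::Function> add_func,
-       arrow::compute::GetFunctionRegistry()->GetFunction("add"));
-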
-Input shapes
-------------
-
-Computation inputs are represented as a general :class:`Datum` class,
-which is a tagged union of several shapes of data such as :class:`Scalar`,
-:class:`Array` and :class:`ChunkedArray`.  Many compute functions support
-both array (chunked or not) and scalar inputs, however some will mandate
-either.  For example, the ``fill_null`` function requires its second input
-to be a scalar, while ``sort_indices`` requires its first and only input to
-be an array.
-
-Invoking functions
-------------------
-
-Compute functions can be invoked by name using
-:func:`arrow::compute::CallFunction`::
-
-   std::shared_ptr<arrow::Array> numbers_array = ...;
-   std::shared_ptr<arrow::Scalar> increment = ...;
-   arrow::Datum incremented_datum;
-
-   ARROW_ASSIGN_OR_RAISE(incremented_datum,
-                         arrow::compute::CallFunction("add", {numbers_array, increment}));
-   std::shared_ptr<arrow::Array> incremented_array = std::move(incremented_datum).make_array();
-
-(note this example uses implicit conversion from ``std::shared_ptr<Array>``
-to ``Datum``)
-
-Many compute functions are also available directly as concrete APIs, here
-:func:`arrow::compute::Add`::
-
-   std::shared_ptr<arrow::Array> numbers_array = ...;
-   std::shared_ptr<arrow::Scalar> increment = ...;
-   arrow::Datum incremented_datum;
-
-   ARROW_ASSIGN_OR_RAISE(incremented_datum,
-                         arrow::compute::Add(numbers_array, increment));
-   std::shared_ptr<arrow::Array> incremented_array = std::move(incremented_datum).make_array();
-
-Some functions accept or require an options structure that determines the
-exact semantics of the function::
-
-   MinMaxOptions min_max_options;
-   min_max_options.null_handling = MinMaxOptions::EMIT_NULL;
-
-   std::shared_ptr<arrow::Array> array = ...;
-   arrow::Datum min_max;
-
-   ARROW_ASSIGN_OR_RAISE(min_max,
-                         arrow::compute::CallFunction("min_max", {array},
-                                                      &min_max_options));
-
-   // Unpack struct scalar result (a two-field {"min", "max"} scalar)
-   std::shared_ptr<arrow::Scalar> min_value, max_value;
-   min_value = min_max.scalar_as<arrow::StructScalar>().value[0];
-   max_value = min_max.scalar_as<arrow::StructScalar>().value[1];
-
-.. seealso::
-   :doc:`Compute API reference <api/compute>`
-
-Implicit casts
-==============
-
-Functions may require conversion of their arguments before execution if a
-kernel does not match the argument types precisely. For example, comparison
-of dictionary encoded arrays is not directly supported by any kernel, but an
-implicit cast can be made allowing comparison against the decoded array.
-
-Each function may define implicit cast behaviour as appropriate. For example,
-comparison and arithmetic kernels require identically typed arguments, and
-support execution against differing numeric types by promoting their arguments
-to a numeric type which can accommodate any value from either input.
-
-.. _common-numeric-type:
-
-Common numeric type
--------------------
-
-The common numeric type of a set of input numeric types is the smallest numeric
-type which can accommodate any value of any input. If any input is a floating
-point type the common numeric type is the widest floating point type among the
-inputs. Otherwise the common numeric type is integral and is signed if any input
-is signed. For example:
-
-+-------------------+----------------------+------------------------------------------------+
-| Input types       | Common numeric type  | Notes                                          |
-+===================+======================+================================================+
-| int32, int32      | int32                |                                                |
-+-------------------+----------------------+------------------------------------------------+
-| int16, int32      | int32                | Max width is 32, promote LHS to int32          |
-+-------------------+----------------------+------------------------------------------------+
-| uint16, int32     | int32                | One input signed, override unsigned            |
-+-------------------+----------------------+------------------------------------------------+
-| uint32, int32     | int64                | Widen to accommodate range of uint32           |
-+-------------------+----------------------+------------------------------------------------+
-| uint16, uint32    | uint32               | All inputs unsigned, maintain unsigned         |
-+-------------------+----------------------+------------------------------------------------+
-| int16, uint32     | int64                |                                                |
-+-------------------+----------------------+------------------------------------------------+
-| uint64, int16     | int64                | int64 cannot accommodate all uint64 values     |
-+-------------------+----------------------+------------------------------------------------+
-| float32, int32    | float32              | Promote RHS to float32                         |
-+-------------------+----------------------+------------------------------------------------+
-| float32, float64  | float64              |                                                |
-+-------------------+----------------------+------------------------------------------------+
-| float32, int64    | float32              | int64 is wider, still promotes to float32      |
-+-------------------+----------------------+------------------------------------------------+
-
-In particular, note that comparing a ``uint64`` column to an ``int16`` column
-may emit an error if one of the ``uint64`` values cannot be expressed as the
-common type ``int64`` (for example, ``2 ** 63``).
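-
-For example, a sketch adding a (hypothetical) ``int16`` array to a
-``uint32`` scalar; per the table above, both inputs are promoted to
-``int64`` before the addition::
-
-   ARROW_ASSIGN_OR_RAISE(
-       arrow::Datum sum,
-       arrow::compute::CallFunction("add", {int16_array, uint32_scalar}));
-   assert(sum.type()->id() == arrow::Type::INT64);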
-
-.. _compute-function-list:
-
-Available functions
-===================
-
-Type categories
----------------
-
-To avoid exhaustively listing supported types, the tables below use a number
-of general type categories:
-
-* "Numeric": Integer types (Int8, etc.) and Floating-point types (Float32,
-  Float64, sometimes Float16).  Some functions also accept Decimal128 and
-  Decimal256 input.
-
-* "Temporal": Date types (Date32, Date64), Time types (Time32, Time64),
-  Timestamp, Duration, Interval.
-
-* "Binary-like": Binary, LargeBinary, sometimes also FixedSizeBinary.
-
-* "String-like": String, LargeString.
-
-* "List-like": List, LargeList, sometimes also FixedSizeList.
-
-If you are unsure whether a function supports a concrete input type, we
-recommend you try it out.  Unsupported input types return a ``TypeError``
-:class:`Status`.
-
-Aggregations
-------------
-
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| Function name            | Arity      | Input types        | Output type           | Options class                              |
-+==========================+============+====================+=======================+============================================+
-| all                      | Unary      | Boolean            | Scalar Boolean        |                                            |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| any                      | Unary      | Boolean            | Scalar Boolean        |                                            |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| count                    | Unary      | Any                | Scalar Int64          | :struct:`CountOptions`                     |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| mean                     | Unary      | Numeric            | Scalar Float64        |                                            |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| min_max                  | Unary      | Numeric            | Scalar Struct  (1)    | :struct:`MinMaxOptions`                    |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| mode                     | Unary      | Numeric            | Struct  (2)           | :struct:`ModeOptions`                      |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| quantile                 | Unary      | Numeric            | Scalar Numeric (3)    | :struct:`QuantileOptions`                  |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| stddev                   | Unary      | Numeric            | Scalar Float64        | :struct:`VarianceOptions`                  |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| sum                      | Unary      | Numeric            | Scalar Numeric (4)    |                                            |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| tdigest                  | Unary      | Numeric            | Scalar Float64        | :struct:`TDigestOptions`                   |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| variance                 | Unary      | Numeric            | Scalar Float64        | :struct:`VarianceOptions`                  |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-
-Notes:
-
-* \(1) Output is a ``{"min": input type, "max": input type}`` Struct.
-
-* \(2) Output is an array of ``{"mode": input type, "count": Int64}`` Struct.
-  It contains the *N* most common elements in the input, in descending
-  order, where *N* is given in :member:`ModeOptions::n`.
-  If two values have the same count, the smallest one comes first.
-  Note that the output can have fewer than *N* elements if the input has
-  fewer than *N* distinct values.
-
-* \(3) Output is Float64 or input type, depending on QuantileOptions.
-
-* \(4) Output is Int64, UInt64 or Float64, depending on the input type.
-
-Element-wise ("scalar") functions
----------------------------------
-
-All element-wise functions accept both arrays and scalars as input.  The
-semantics for unary functions are as follows:
-
-* scalar inputs produce a scalar output
-* array inputs produce an array output
-
-Binary functions have the following semantics (which are sometimes called
-"broadcasting" in other systems such as NumPy):
-
-* ``(scalar, scalar)`` inputs produce a scalar output
-* ``(array, array)`` inputs produce an array output (and both inputs must
-  be of the same length)
-* ``(scalar, array)`` and ``(array, scalar)`` produce an array output.
-  The scalar input is handled as if it were an array of the same length N
-  as the other input, with the same value repeated N times.
-
-Arithmetic functions
-~~~~~~~~~~~~~~~~~~~~
-
-These functions expect two inputs of numeric type and apply a given binary
-operation to each pair of elements gathered from the inputs.  If any of the
-input elements in a pair is null, the corresponding output element is null.
-Inputs will be cast to the :ref:`common numeric type <common-numeric-type>`
-(and dictionary decoded, if applicable) before the operation is applied.
-
-The default variant of these functions does not detect overflow (the result
-then typically wraps around).  Each function is also available in an
-overflow-checking variant, suffixed ``_checked``, which returns
-an ``Invalid`` :class:`Status` when overflow is detected.
-
-+--------------------------+------------+--------------------+---------------------+
-| Function name            | Arity      | Input types        | Output type         |
-+==========================+============+====================+=====================+
-| add                      | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| add_checked              | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| divide                   | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| divide_checked           | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| power                    | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| power_checked            | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| multiply                 | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| multiply_checked         | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| subtract                 | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-| subtract_checked         | Binary     | Numeric            | Numeric             |
-+--------------------------+------------+--------------------+---------------------+
-
-Comparisons
-~~~~~~~~~~~
-
-These functions expect two inputs of numeric type (in which case they will be
-cast to the :ref:`common numeric type <common-numeric-type>` before comparison),
-or two inputs of Binary- or String-like types, or two inputs of Temporal types.
-If any input is dictionary encoded it will be expanded for the purposes of
-comparison. If any of the input elements in a pair is null, the corresponding
-output element is null.
-
-+--------------------------+------------+---------------------------------------------+---------------------+
-| Function names           | Arity      | Input types                                 | Output type         |
-+==========================+============+=============================================+=====================+
-| equal, not_equal         | Binary     | Numeric, Temporal, Binary- and String-like  | Boolean             |
-+--------------------------+------------+---------------------------------------------+---------------------+
-| greater, greater_equal,  | Binary     | Numeric, Temporal, Binary- and String-like  | Boolean             |
-| less, less_equal         |            |                                             |                     |
-+--------------------------+------------+---------------------------------------------+---------------------+
-
-Logical functions
-~~~~~~~~~~~~~~~~~~
-
-The normal behaviour for these functions is to emit a null if any of the
-inputs is null (similar to the semantics of ``NaN`` in floating-point
-computations).
-
-Some of them are also available in a `Kleene logic`_ variant (suffixed
-``_kleene``) where null is taken to mean "undefined".  This is the
-interpretation of null used in SQL systems as well as R and Julia,
-for example.
-
-For the Kleene logic variants, therefore:
-
-* "true AND null", "null AND true" give "null" (the result is undefined)
-* "true OR null", "null OR true" give "true"
-* "false AND null", "null AND false" give "false"
-* "false OR null", "null OR false" give "null" (the result is undefined)
-
-+--------------------------+------------+--------------------+---------------------+
-| Function name            | Arity      | Input types        | Output type         |
-+==========================+============+====================+=====================+
-| and                      | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| and_not                  | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| and_kleene               | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| and_not_kleene           | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| invert                   | Unary      | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| or                       | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| or_kleene                | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-| xor                      | Binary     | Boolean            | Boolean             |
-+--------------------------+------------+--------------------+---------------------+
-
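-A sketch invoking a Kleene variant by name, with hypothetical boolean
-arrays ``left`` and ``right``::
-
-   ARROW_ASSIGN_OR_RAISE(
-       arrow::Datum result,
-       arrow::compute::CallFunction("and_kleene", {left, right}));
-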
-.. _Kleene logic: https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_and_Priest_logics
-
-String predicates
-~~~~~~~~~~~~~~~~~
-
-These functions classify the input string elements according to their character
-contents.  An empty string element emits false in the output.  For ASCII
-variants of the functions (prefixed ``ascii_``), a string element with non-ASCII
-characters emits false in the output.
-
-The first set of functions operates on a character-per-character basis,
-and emits true in the output if the input contains only characters of a
-given class:
-
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| Function name            | Arity      | Input types        | Output type    | Matched character class          |
-+==========================+============+====================+================+==================================+
-| ascii_is_alnum           | Unary      | String-like        | Boolean        | Alphanumeric ASCII               |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| ascii_is_alpha           | Unary      | String-like        | Boolean        | Alphabetic ASCII                 |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| ascii_is_decimal         | Unary      | String-like        | Boolean        | Decimal ASCII \(1)               |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| ascii_is_lower           | Unary      | String-like        | Boolean        | Lowercase ASCII \(2)             |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| ascii_is_printable       | Unary      | String-like        | Boolean        | Printable ASCII                  |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| ascii_is_space           | Unary      | String-like        | Boolean        | Whitespace ASCII                 |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| ascii_is_upper           | Unary      | String-like        | Boolean        | Uppercase ASCII \(2)             |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_alnum            | Unary      | String-like        | Boolean        | Alphanumeric Unicode             |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_alpha            | Unary      | String-like        | Boolean        | Alphabetic Unicode               |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_decimal          | Unary      | String-like        | Boolean        | Decimal Unicode                  |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_digit            | Unary      | String-like        | Boolean        | Unicode digit \(3)               |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_lower            | Unary      | String-like        | Boolean        | Lowercase Unicode \(2)           |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_numeric          | Unary      | String-like        | Boolean        | Numeric Unicode \(4)             |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_printable        | Unary      | String-like        | Boolean        | Printable Unicode                |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_space            | Unary      | String-like        | Boolean        | Whitespace Unicode               |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-| utf8_is_upper            | Unary      | String-like        | Boolean        | Uppercase Unicode \(2)           |
-+--------------------------+------------+--------------------+----------------+----------------------------------+
-
-* \(1) Also matches all numeric ASCII characters and all ASCII digits; in
-  ASCII, the decimal, digit and numeric character classes all coincide.
-
-* \(2) Non-cased characters, such as punctuation, do not match.
-
-* \(3) This is currently the same as ``utf8_is_decimal``.
-
-* \(4) Unlike ``utf8_is_decimal``, non-decimal numeric characters also match.
-
-The second set of functions also consider the character order in a string
-element:
-
-+--------------------------+------------+--------------------+---------------------+---------+
-| Function name            | Arity      | Input types        | Output type         | Notes   |
-+==========================+============+====================+=====================+=========+
-| ascii_is_title           | Unary      | String-like        | Boolean             | \(1)    |
-+--------------------------+------------+--------------------+---------------------+---------+
-| utf8_is_title            | Unary      | String-like        | Boolean             | \(1)    |
-+--------------------------+------------+--------------------+---------------------+---------+
-
-* \(1) Output is true iff the input string element is title-cased, i.e. each
-  word starts with an uppercase character, followed by lowercase characters.
-  Word boundaries are defined by non-cased characters.
-
-The third set of functions examines string elements on a byte-per-byte basis:
-
-+--------------------------+------------+--------------------+---------------------+---------+
-| Function name            | Arity      | Input types        | Output type         | Notes   |
-+==========================+============+====================+=====================+=========+
-| string_is_ascii          | Unary      | String-like        | Boolean             | \(1)    |
-+--------------------------+------------+--------------------+---------------------+---------+
-
-* \(1) Output is true iff the input string element contains only ASCII characters,
-  i.e. only bytes in [0, 127].
-
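-A minimal sketch of invoking one of these predicates through the generic
-:func:`arrow::compute::CallFunction` API (the helper name and input array
-are illustrative only):
-
-.. code-block:: cpp
-
-   #include "arrow/compute/api.h"
-
-   // Returns a Boolean Datum with one entry per input string
-   arrow::Result<arrow::Datum> IsAscii(const std::shared_ptr<arrow::Array>& strings) {
-     return arrow::compute::CallFunction("string_is_ascii", {strings});
-   }
-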
-String transforms
-~~~~~~~~~~~~~~~~~
-
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| Function name            | Arity      | Input types             | Output type         | Notes   | Options class                         |
-+==========================+============+=========================+=====================+=========+=======================================+
-| ascii_lower              | Unary      | String-like             | String-like         | \(1)    |                                       |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| ascii_upper              | Unary      | String-like             | String-like         | \(1)    |                                       |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| binary_length            | Unary      | Binary- or String-like  | Int32 or Int64      | \(2)    |                                       |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| replace_substring        | Unary      | String-like             | String-like         | \(3)    | :struct:`ReplaceSubstringOptions`     |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| replace_substring_regex  | Unary      | String-like             | String-like         | \(4)    | :struct:`ReplaceSubstringOptions`     |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| utf8_length              | Unary      | String-like             | Int32 or Int64      | \(5)    |                                       |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| utf8_lower               | Unary      | String-like             | String-like         | \(6)    |                                       |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-| utf8_upper               | Unary      | String-like             | String-like         | \(6)    |                                       |
-+--------------------------+------------+-------------------------+---------------------+---------+---------------------------------------+
-
-
-* \(1) Each ASCII character in the input is converted to lowercase or
-  uppercase.  Non-ASCII characters are left untouched.
-
-* \(2) Output is the physical length in bytes of each input element.  Output
-  type is Int32 for Binary / String, Int64 for LargeBinary / LargeString.
-
-* \(3) Replace non-overlapping substrings that match
-  :member:`ReplaceSubstringOptions::pattern` with
-  :member:`ReplaceSubstringOptions::replacement`. If
-  :member:`ReplaceSubstringOptions::max_replacements` != -1, it determines the
-  maximum number of replacements made, counting from the left.
-
-* \(4) Replace non-overlapping substrings that match the regular expression
-  :member:`ReplaceSubstringOptions::pattern` with
-  :member:`ReplaceSubstringOptions::replacement`, using the Google RE2 library. If
-  :member:`ReplaceSubstringOptions::max_replacements` != -1, it determines the
-  maximum number of replacements made, counting from the left. Note that if the
-  pattern contains groups, backreferencing can be used.
-
-* \(5) Output is the number of characters (not bytes) of each input element.
-  Output type is Int32 for String, Int64 for LargeString. 
-
-* \(6) Each UTF8-encoded character in the input is converted to lowercase or
-  uppercase.
-
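-For example, a minimal sketch of ``replace_substring`` (the helper name,
-input array and option values are illustrative only):
-
-.. code-block:: cpp
-
-   arrow::Result<arrow::Datum> ReplaceFoo(const std::shared_ptr<arrow::Array>& input) {
-     // max_replacements defaults to -1, i.e. replace all occurrences
-     arrow::compute::ReplaceSubstringOptions options("foo", "bar");
-     return arrow::compute::CallFunction("replace_substring", {input}, &options);
-   }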
-
-String trimming
-~~~~~~~~~~~~~~~
-
-These functions trim off characters on both sides (trim), on the left side (ltrim), or on the right side (rtrim).
-
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| Function name            | Arity      | Input types             | Output type         | Options class                          | Notes   |
-+==========================+============+=========================+=====================+========================================+=========+
-| ascii_ltrim              | Unary      | String-like             | String-like         | :struct:`TrimOptions`                  | \(1)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| ascii_ltrim_whitespace   | Unary      | String-like             | String-like         |                                        | \(2)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| ascii_rtrim              | Unary      | String-like             | String-like         | :struct:`TrimOptions`                  | \(1)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| ascii_rtrim_whitespace   | Unary      | String-like             | String-like         |                                        | \(2)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| ascii_trim               | Unary      | String-like             | String-like         | :struct:`TrimOptions`                  | \(1)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| ascii_trim_whitespace    | Unary      | String-like             | String-like         |                                        | \(2)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| utf8_ltrim               | Unary      | String-like             | String-like         | :struct:`TrimOptions`                  | \(3)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| utf8_ltrim_whitespace    | Unary      | String-like             | String-like         |                                        | \(4)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| utf8_rtrim               | Unary      | String-like             | String-like         | :struct:`TrimOptions`                  | \(3)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| utf8_rtrim_whitespace    | Unary      | String-like             | String-like         |                                        | \(4)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| utf8_trim                | Unary      | String-like             | String-like         | :struct:`TrimOptions`                  | \(3)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-| utf8_trim_whitespace     | Unary      | String-like             | String-like         |                                        | \(4)    |
-+--------------------------+------------+-------------------------+---------------------+----------------------------------------+---------+
-
-* \(1) Only characters specified in :member:`TrimOptions::characters` will be
-  trimmed off. Both the input string and the ``characters`` argument are
-  interpreted as ASCII characters.
-
-* \(2) Only trim off ASCII whitespace characters (``'\t'``, ``'\n'``, ``'\v'``,
-  ``'\f'``, ``'\r'`` and ``' '``).
-
-* \(3) Only characters specified in :member:`TrimOptions::characters` will be
-  trimmed off.
-
-* \(4) Only trim off Unicode whitespace characters.
-
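-For example, a minimal sketch of trimming a custom character set (the helper
-name and trimmed characters are illustrative only):
-
-.. code-block:: cpp
-
-   arrow::Result<arrow::Datum> TrimPunctuation(const std::shared_ptr<arrow::Array>& input) {
-     arrow::compute::TrimOptions options(".,;");  // characters to trim off
-     return arrow::compute::CallFunction("utf8_trim", {input}, &options);
-   }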
-
-Containment tests
-~~~~~~~~~~~~~~~~~
-
-+---------------------------+------------+------------------------------------+---------------+----------------------------------------+
-| Function name             | Arity      | Input types                        | Output type   | Options class                          |
-+===========================+============+====================================+===============+========================================+
-| match_substring           | Unary      | String-like                        | Boolean (1)   | :struct:`MatchSubstringOptions`        |
-+---------------------------+------------+------------------------------------+---------------+----------------------------------------+
-| match_substring_regex     | Unary      | String-like                        | Boolean (2)   | :struct:`MatchSubstringOptions`        |
-+---------------------------+------------+------------------------------------+---------------+----------------------------------------+
-| index_in                  | Unary      | Boolean, Null, Numeric, Temporal,  | Int32 (3)     | :struct:`SetLookupOptions`             |
-|                           |            | Binary- and String-like            |               |                                        |
-+---------------------------+------------+------------------------------------+---------------+----------------------------------------+
-| is_in                     | Unary      | Boolean, Null, Numeric, Temporal,  | Boolean (4)   | :struct:`SetLookupOptions`             |
-|                           |            | Binary- and String-like            |               |                                        |
-+---------------------------+------------+------------------------------------+---------------+----------------------------------------+
-
-* \(1) Output is true iff :member:`MatchSubstringOptions::pattern`
-  is a substring of the corresponding input element.
-
-* \(2) Output is true iff :member:`MatchSubstringOptions::pattern`
-  matches the corresponding input element at any position.
-
-* \(3) Output is the index of the corresponding input element in
-  :member:`SetLookupOptions::value_set`, if found there.  Otherwise,
-  output is null.
-
-* \(4) Output is true iff the corresponding input element is equal to one
-  of the elements in :member:`SetLookupOptions::value_set`.
-
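-For example, a minimal sketch of ``match_substring`` (the helper name and
-pattern are illustrative only):
-
-.. code-block:: cpp
-
-   arrow::Result<arrow::Datum> ContainsNeedle(const std::shared_ptr<arrow::Array>& input) {
-     arrow::compute::MatchSubstringOptions options("needle");
-     return arrow::compute::CallFunction("match_substring", {input}, &options);
-   }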
-
-String splitting
-~~~~~~~~~~~~~~~~
-
-These functions split strings into lists of strings.  All kernels can optionally
-be configured with a ``max_splits`` and a ``reverse`` parameter, where
-``max_splits == -1`` means no limit (the default).  When ``reverse`` is true,
-the splitting is done starting from the end of the string; this is only relevant
-when a positive ``max_splits`` is given.
-
-+--------------------------+------------+-------------------------+-------------------+----------------------------------+---------+
-| Function name            | Arity      | Input types             | Output type       | Options class                    | Notes   |
-+==========================+============+=========================+===================+==================================+=========+
-| split_pattern            | Unary      | String-like             | List-like         | :struct:`SplitPatternOptions`    | \(1)    |
-+--------------------------+------------+-------------------------+-------------------+----------------------------------+---------+
-| utf8_split_whitespace    | Unary      | String-like             | List-like         | :struct:`SplitOptions`           | \(2)    |
-+--------------------------+------------+-------------------------+-------------------+----------------------------------+---------+
-| ascii_split_whitespace   | Unary      | String-like             | List-like         | :struct:`SplitOptions`           | \(3)    |
-+--------------------------+------------+-------------------------+-------------------+----------------------------------+---------+
-
-* \(1) The string is split when an exact pattern is found (the pattern itself
-  is not included in the output).
-
-* \(2) A non-zero-length sequence of Unicode-defined whitespace codepoints
-  is seen as a separator.
-
-* \(3) A non-zero-length sequence of ASCII-defined whitespace bytes
-  (``'\t'``, ``'\n'``, ``'\v'``, ``'\f'``, ``'\r'`` and ``' '``) is seen
-  as a separator.
-
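-For example, a minimal sketch of ``split_pattern`` (the helper name and
-option values are illustrative only):
-
-.. code-block:: cpp
-
-   arrow::Result<arrow::Datum> SplitOnComma(const std::shared_ptr<arrow::Array>& input) {
-     // Split on "," at most twice, scanning from the left
-     arrow::compute::SplitPatternOptions options(",", /*max_splits=*/2);
-     return arrow::compute::CallFunction("split_pattern", {input}, &options);
-   }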
-
-Structural transforms
-~~~~~~~~~~~~~~~~~~~~~
-
-.. XXX (this category is a bit of a hodgepodge)
-
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-| Function name            | Arity      | Input types                                    | Output type         | Notes   |
-+==========================+============+================================================+=====================+=========+
-| fill_null                | Binary     | Boolean, Null, Numeric, Temporal, String-like  | Input type          | \(1)    |
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-| is_nan                   | Unary      | Float, Double                                  | Boolean             | \(2)    |
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-| is_null                  | Unary      | Any                                            | Boolean             | \(3)    |
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-| is_valid                 | Unary      | Any                                            | Boolean             | \(4)    |
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-| list_value_length        | Unary      | List-like                                      | Int32 or Int64      | \(5)    |
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-| project                  | Varargs    | Any                                            | Struct              | \(6)    |
-+--------------------------+------------+------------------------------------------------+---------------------+---------+
-
-* \(1) First input must be an array, second input a scalar of the same type.
-  Output is an array of the same type as the inputs, and with the same values
-  as the first input, except for nulls replaced with the second input value.
-
-* \(2) Output is true iff the corresponding input element is NaN.
-
-* \(3) Output is true iff the corresponding input element is null.
-
-* \(4) Output is true iff the corresponding input element is non-null.
-
-* \(5) Each output element is the length of the corresponding input element
-  (null if input is null).  Output type is Int32 for List, Int64 for LargeList.
-
-* \(6) The output struct's field types are the types of its arguments. The
-  field names are specified using an instance of :struct:`ProjectOptions`.
-  The output shape will be scalar if all inputs are scalar, otherwise any
-  scalars will be broadcast to arrays.
-
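-For example, a minimal sketch of ``fill_null`` (the helper name is
-illustrative only):
-
-.. code-block:: cpp
-
-   // Replace nulls in an Int32 array with the scalar value 0
-   arrow::Result<arrow::Datum> FillZero(const std::shared_ptr<arrow::Array>& input) {
-     auto zero = arrow::MakeScalar(int32_t{0});
-     return arrow::compute::CallFunction("fill_null", {input, zero});
-   }
-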
-Conversions
-~~~~~~~~~~~
-
-A general conversion function named ``cast`` is provided which accepts a large
-number of input and output types.  The type to cast to can be passed in a
-:struct:`CastOptions` instance.  As an alternative, the same service is
-provided by a concrete function :func:`~arrow::compute::Cast`.
-
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| Function name            | Arity      | Input types        | Output type           | Options class                              |
-+==========================+============+====================+=======================+============================================+
-| cast                     | Unary      | Many               | Variable              | :struct:`CastOptions`                      |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-| strptime                 | Unary      | String-like        | Timestamp             | :struct:`StrptimeOptions`                  |
-+--------------------------+------------+--------------------+-----------------------+--------------------------------------------+
-
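-For example, a minimal sketch of a safe numeric cast using the concrete
-function (the helper name is illustrative only):
-
-.. code-block:: cpp
-
-   // Cast an Int32 array to Float64, with overflow/truncation checks enabled
-   arrow::Result<arrow::Datum> ToDouble(const std::shared_ptr<arrow::Array>& input) {
-     return arrow::compute::Cast(input, arrow::float64(),
-                                 arrow::compute::CastOptions::Safe());
-   }
-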
-The conversions available with ``cast`` are listed below.  In all cases, a
-null input value is converted into a null output value.
-
-**Truth value extraction**
-
-+-----------------------------+------------------------------------+--------------+
-| Input type                  | Output type                        | Notes        |
-+=============================+====================================+==============+
-| Binary- and String-like     | Boolean                            | \(1)         |
-+-----------------------------+------------------------------------+--------------+
-| Numeric                     | Boolean                            | \(2)         |
-+-----------------------------+------------------------------------+--------------+
-
-* \(1) Output is true iff the corresponding input value has non-zero length.
-
-* \(2) Output is true iff the corresponding input value is non-zero.
-
-**Same-kind conversion**
-
-+-----------------------------+------------------------------------+--------------+
-| Input type                  | Output type                        | Notes        |
-+=============================+====================================+==============+
-| Int32                       | 32-bit Temporal                    | \(1)         |
-+-----------------------------+------------------------------------+--------------+
-| Int64                       | 64-bit Temporal                    | \(1)         |
-+-----------------------------+------------------------------------+--------------+
-| (Large)Binary               | (Large)String                      | \(2)         |
-+-----------------------------+------------------------------------+--------------+
-| (Large)String               | (Large)Binary                      | \(3)         |
-+-----------------------------+------------------------------------+--------------+
-| Numeric                     | Numeric                            | \(4) \(5)    |
-+-----------------------------+------------------------------------+--------------+
-| 32-bit Temporal             | Int32                              | \(1)         |
-+-----------------------------+------------------------------------+--------------+
-| 64-bit Temporal             | Int64                              | \(1)         |
-+-----------------------------+------------------------------------+--------------+
-| Temporal                    | Temporal                           | \(4) \(5)    |
-+-----------------------------+------------------------------------+--------------+
-
-* \(1) No-operation cast: the raw values are kept identical, only
-  the type is changed.
-
-* \(2) Validates the contents if :member:`CastOptions::allow_invalid_utf8`
-  is false.
-
-* \(3) No-operation cast: only the type is changed.
-
-* \(4) Overflow and truncation checks are enabled depending on
-  the given :struct:`CastOptions`.
-
-* \(5) Not all such casts have been implemented.
-
-**String representations**
-
-+-----------------------------+------------------------------------+---------+
-| Input type                  | Output type                        | Notes   |
-+=============================+====================================+=========+
-| Boolean                     | String-like                        |         |
-+-----------------------------+------------------------------------+---------+
-| Numeric                     | String-like                        |         |
-+-----------------------------+------------------------------------+---------+
-
-**Generic conversions**
-
-+-----------------------------+------------------------------------+---------+
-| Input type                  | Output type                        | Notes   |
-+=============================+====================================+=========+
-| Dictionary                  | Dictionary value type              | \(1)    |
-+-----------------------------+------------------------------------+---------+
-| Extension                   | Extension storage type             |         |
-+-----------------------------+------------------------------------+---------+
-| List-like                   | List-like                          | \(2)    |
-+-----------------------------+------------------------------------+---------+
-| Null                        | Any                                |         |
-+-----------------------------+------------------------------------+---------+
-
-* \(1) The dictionary indices are unchanged, the dictionary values are
-  cast from the input value type to the output value type (if a conversion
-  is available).
-
-* \(2) The list offsets are unchanged, the list values are cast from the
-  input value type to the output value type (if a conversion is
-  available).
-
-
-Array-wise ("vector") functions
--------------------------------
-
-Associative transforms
-~~~~~~~~~~~~~~~~~~~~~~
-
-+--------------------------+------------+------------------------------------+----------------------------+
-| Function name            | Arity      | Input types                        | Output type                |
-+==========================+============+====================================+============================+
-| dictionary_encode        | Unary      | Boolean, Null, Numeric,            | Dictionary (1)             |
-|                          |            | Temporal, Binary- and String-like  |                            |
-+--------------------------+------------+------------------------------------+----------------------------+
-| unique                   | Unary      | Boolean, Null, Numeric,            | Input type (2)             |
-|                          |            | Temporal, Binary- and String-like  |                            |
-+--------------------------+------------+------------------------------------+----------------------------+
-| value_counts             | Unary      | Boolean, Null, Numeric,            | Input type (3)             |
-|                          |            | Temporal, Binary- and String-like  |                            |
-+--------------------------+------------+------------------------------------+----------------------------+
-
-* \(1) Output is ``Dictionary(Int32, input type)``.
-
-* \(2) Duplicates are removed from the output while the original order is
-  maintained.
-
-* \(3) Output is a ``{"values": input type, "counts": Int64}`` Struct.
-  Each output element corresponds to a unique value in the input, along
-  with the number of times this value has appeared.
-
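-A minimal sketch using the convenience wrapper for dictionary encoding (the
-helper name is illustrative only; ``unique`` and ``value_counts`` have
-analogous wrappers):
-
-.. code-block:: cpp
-
-   // Produces a Dictionary(Int32, input type) encoded Datum
-   arrow::Result<arrow::Datum> Encode(const std::shared_ptr<arrow::Array>& input) {
-     return arrow::compute::DictionaryEncode(input);
-   }
-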
-Selections
-~~~~~~~~~~
-
-These functions select a subset of the first input defined by the second input.
-
-+-----------------+------------+---------------+--------------+------------------+-------------------------+-------------+
-| Function name   | Arity      | Input type 1  | Input type 2 | Output type      | Options class           | Notes       |
-+=================+============+===============+==============+==================+=========================+=============+
-| filter          | Binary     | Any (1)       | Boolean      | Input type 1     | :struct:`FilterOptions` | \(2)        |
-+-----------------+------------+---------------+--------------+------------------+-------------------------+-------------+
-| take            | Binary     | Any (1)       | Integer      | Input type 1     | :struct:`TakeOptions`   | \(3)        |
-+-----------------+------------+---------------+--------------+------------------+-------------------------+-------------+
-
-* \(1) Unions are unsupported.
-
-* \(2) Each element in input 1 is appended to the output iff the corresponding
-  element in input 2 is true.
-
-* \(3) For each element *i* in input 2, the *i*'th element in input 1 is
-  appended to the output.
-
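-For example, a minimal sketch of ``filter`` via its convenience wrapper (the
-helper name is illustrative only):
-
-.. code-block:: cpp
-
-   // Keep only the values whose corresponding mask element is true
-   arrow::Result<arrow::Datum> KeepMatching(const std::shared_ptr<arrow::Array>& values,
-                                            const std::shared_ptr<arrow::Array>& mask) {
-     return arrow::compute::Filter(values, mask);
-   }
-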
-Sorts and partitions
-~~~~~~~~~~~~~~~~~~~~
-
-In these functions, nulls are considered greater than any other value
-(they will be sorted or partitioned at the end of the array).
-Floating-point NaN values are considered greater than any other non-null
-value, but smaller than nulls.
-
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-| Function name         | Arity      | Input types             | Output type       | Options class                  | Notes          |
-+=======================+============+=========================+===================+================================+================+
-| partition_nth_indices | Unary      | Binary- and String-like | UInt64            | :struct:`PartitionNthOptions`  | \(1) \(3)      |
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-| partition_nth_indices | Unary      | Numeric                 | UInt64            | :struct:`PartitionNthOptions`  | \(1)           |
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-| array_sort_indices    | Unary      | Binary- and String-like | UInt64            | :struct:`ArraySortOptions`     | \(2) \(3) \(4) |
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-| array_sort_indices    | Unary      | Numeric                 | UInt64            | :struct:`ArraySortOptions`     | \(2) \(4)      |
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-| sort_indices          | Unary      | Binary- and String-like | UInt64            | :struct:`SortOptions`          | \(2) \(3) \(5) |
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-| sort_indices          | Unary      | Numeric                 | UInt64            | :struct:`SortOptions`          | \(2) \(5)      |
-+-----------------------+------------+-------------------------+-------------------+--------------------------------+----------------+
-
-* \(1) The output is an array of indices into the input array that define
-  a partial non-stable sort such that the *N*'th index points to the *N*'th
-  element in sorted order, and all indices before the *N*'th point to
-  elements less than or equal to elements at or after the *N*'th (similar to
-  :func:`std::nth_element`).  *N* is given in
-  :member:`PartitionNthOptions::pivot`.
-
-* \(2) The output is an array of indices into the input, that define a
-  stable sort of the input.
-
-* \(3) Input values are ordered lexicographically as bytestrings (even
-  for String arrays).
-
-* \(4) The input must be an array. The default order is ascending.
-
-* \(5) The input can be an array, chunked array, record batch or
-  table. If the input is a record batch or table, one or more sort
-  keys must be specified.
-
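-For example, a minimal sketch of sorting an array via ``sort_indices`` and
-``take`` (the helper name is illustrative, and the exact convenience wrapper
-signatures may differ):
-
-.. code-block:: cpp
-
-   arrow::Result<arrow::Datum> SortAscending(const std::shared_ptr<arrow::Array>& values) {
-     // Compute the sorted order, then materialize it with "take"
-     ARROW_ASSIGN_OR_RAISE(auto indices, arrow::compute::SortIndices(*values));
-     return arrow::compute::Take(values, indices);
-   }
-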
-Structural transforms
-~~~~~~~~~~~~~~~~~~~~~
-
-+--------------------------+------------+--------------------+---------------------+---------+
-| Function name            | Arity      | Input types        | Output type         | Notes   |
-+==========================+============+====================+=====================+=========+
-| list_flatten             | Unary      | List-like          | List value type     | \(1)    |
-+--------------------------+------------+--------------------+---------------------+---------+
-| list_parent_indices      | Unary      | List-like          | Int32 or Int64      | \(2)    |
-+--------------------------+------------+--------------------+---------------------+---------+
-
-* \(1) The top level of nesting is removed: all values in the list child array,
-  including nulls, are appended to the output.  However, nulls in the parent
-  list array are discarded.
-
-* \(2) For each value in the list child array, the index at which it is found
-  in the list array is appended to the output.  Nulls in the parent list array
-  are discarded.
-
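-For example, a minimal sketch of ``list_flatten`` (the helper name is
-illustrative only):
-
-.. code-block:: cpp
-
-   arrow::Result<arrow::Datum> Flatten(const std::shared_ptr<arrow::Array>& list_array) {
-     return arrow::compute::CallFunction("list_flatten", {list_array});
-   }
-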
diff --git a/docs/source/cpp/conventions.rst b/docs/source/cpp/conventions.rst
deleted file mode 100644
index 218d028..0000000
--- a/docs/source/cpp/conventions.rst
+++ /dev/null
@@ -1,107 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-.. cpp:namespace:: arrow
-
-Conventions
-===========
-
-The Arrow C++ API follows a few simple guidelines.  As with many rules,
-there may be exceptions.
-
-Language version
-----------------
-
-Arrow is C++11-compatible.  A few backports are used for newer functionality,
-for example the :class:`std::string_view` class.
-
-Namespacing
------------
-
-The entire Arrow API (except macros) is namespaced inside the ``arrow``
-namespace and nested namespaces thereof.
-
-Safe pointers
--------------
-
-Arrow objects are usually passed and stored using safe pointers -- most of
-the time :class:`std::shared_ptr` but sometimes also :class:`std::unique_ptr`.
-
-Immutability
-------------
-
-Many Arrow objects are immutable: once constructed, their logical properties
-cannot change anymore.  This makes it possible to use them in multi-threaded
-scenarios without requiring tedious and error-prone synchronization.
-
-There are obvious exceptions to this, such as IO objects or mutable data buffers.
-
-Error reporting
----------------
-
-Most APIs indicate a successful or erroneous outcome by returning a
-:class:`arrow::Status` instance.  Arrow doesn't throw exceptions of its
-own, but third-party exceptions might propagate through, especially
-:class:`std::bad_alloc` (but Arrow doesn't use the standard allocators for
-large data).
-
-When an API can return either an error code or a successful value, it usually
-does so by returning the template class
-:class:`arrow::Result <template\<class T\> arrow::Result>`.  However,
-some APIs (usually deprecated) return :class:`arrow::Status` and pass the
-result value as an out-pointer parameter.
-
-Here is an example of checking the outcome of an operation::
-
-   const int64_t buffer_size = 4096;
-
-   auto maybe_buffer = arrow::AllocateBuffer(buffer_size);
-   if (!maybe_buffer.ok()) {
-      // ... handle error
-   } else {
-      std::shared_ptr<arrow::Buffer> buffer = std::move(*maybe_buffer);
-      // ... use allocated buffer
-   }
-
-If the caller function itself returns a :class:`arrow::Result` or
-:class:`arrow::Status` and wants to propagate any non-successful outcome, two
-convenience macros are available:
-
-* :c:macro:`ARROW_RETURN_NOT_OK` takes a :class:`arrow::Status` parameter
-  and returns it if not successful.
-
-* :c:macro:`ARROW_ASSIGN_OR_RAISE` takes a :class:`arrow::Result` parameter,
-  assigns its result to an *lvalue* if successful, or returns the corresponding
-  :class:`arrow::Status` on error.
-
-For example::
-
-   arrow::Status DoSomething() {
-      const int64_t buffer_size = 4096;
-      std::shared_ptr<arrow::Buffer> buffer;
-      ARROW_ASSIGN_OR_RAISE(buffer, arrow::AllocateBuffer(buffer_size));
-      // ... allocation successful, do something with buffer below
-
-      // return success at the end
-      return arrow::Status::OK();
-   }
-
-.. seealso::
-   :doc:`API reference for error reporting <api/support>`
diff --git a/docs/source/cpp/csv.rst b/docs/source/cpp/csv.rst
deleted file mode 100644
index 123bc8a..0000000
--- a/docs/source/cpp/csv.rst
+++ /dev/null
@@ -1,172 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-.. cpp:namespace:: arrow::csv
-
-=================
-Reading CSV files
-=================
-
-Arrow provides a fast CSV reader allowing ingestion of external data
-as Arrow tables.
-
-.. seealso::
-   :ref:`CSV reader API reference <cpp-api-csv>`.
-
-Basic usage
-===========
-
-A CSV file is read from a :class:`~arrow::io::InputStream`.
-
-.. code-block:: cpp
-
-   #include "arrow/csv/api.h"
-
-   {
-      // ...
-      arrow::io::IOContext io_context = arrow::io::default_io_context();
-      std::shared_ptr<arrow::io::InputStream> input = ...;
-
-      auto read_options = arrow::csv::ReadOptions::Defaults();
-      auto parse_options = arrow::csv::ParseOptions::Defaults();
-      auto convert_options = arrow::csv::ConvertOptions::Defaults();
-
-      // Instantiate TableReader from input stream and options
-      auto maybe_reader =
-        arrow::csv::TableReader::Make(io_context,
-                                      input,
-                                      read_options,
-                                      parse_options,
-                                      convert_options);
-      if (!maybe_reader.ok()) {
-         // Handle TableReader instantiation error...
-      }
-      std::shared_ptr<arrow::csv::TableReader> reader = *maybe_reader;
-
-      // Read table from CSV file
-      auto maybe_table = reader->Read();
-      if (!maybe_table.ok()) {
-         // Handle CSV read error
-         // (for example a CSV syntax error or failed type conversion)
-      }
-      std::shared_ptr<arrow::Table> table = *maybe_table;
-   }
-
-Column names
-============
-
-There are three possible ways to determine the column names from the CSV file:
-
-* By default, the column names are read from the first row in the CSV file
-* If :member:`ReadOptions::column_names` is set, it forces the column
-  names in the table to these values (the first row in the CSV file is
-  read as data)
-* If :member:`ReadOptions::autogenerate_column_names` is true, column names
-  will be autogenerated with the pattern "f0", "f1"... (the first row in the
-  CSV file is read as data)
-
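-For example, a hedged sketch of forcing explicit column names (the names are
-illustrative only):
-
-.. code-block:: cpp
-
-   auto read_options = arrow::csv::ReadOptions::Defaults();
-   // The first CSV row will now be parsed as data, not as a header
-   read_options.column_names = {"x", "y", "z"};
-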
-Column selection
-================
-
-By default, Arrow reads all columns in the CSV file.  You can narrow the
-selection of columns with the :member:`ConvertOptions::include_columns`
-option.  If some columns in :member:`ConvertOptions::include_columns`
-are missing from the CSV file, an error will be emitted unless
-:member:`ConvertOptions::include_missing_columns` is true, in which case
-the missing columns are assumed to contain all-null values.
-
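-For example, a hedged sketch of narrowing the column selection (the column
-names are illustrative only):
-
-.. code-block:: cpp
-
-   auto convert_options = arrow::csv::ConvertOptions::Defaults();
-   convert_options.include_columns = {"a", "b"};
-   // Tolerate "b" being absent from the file; it will be read as all-null
-   convert_options.include_missing_columns = true;
-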
-Interaction with column names
------------------------------
-
-If both :member:`ReadOptions::column_names` and
-:member:`ConvertOptions::include_columns` are specified,
-the :member:`ReadOptions::column_names` are assumed to map to CSV columns,
-and :member:`ConvertOptions::include_columns` is a subset of those column
-names that will be part of the Arrow Table.
-
-Data types
-==========
-
-By default, the CSV reader infers the most appropriate data type for each
-column.  Type inference considers the following data types, in order:
-
-* Null
-* Int64
-* Boolean
-* Date32
-* Timestamp (with seconds unit)
-* Timestamp (with nanoseconds unit)
-* Float64
-* Dictionary<String> (if :member:`ConvertOptions::auto_dict_encode` is true)
-* Dictionary<Binary> (if :member:`ConvertOptions::auto_dict_encode` is true)
-* String
-* Binary
-
-It is possible to override type inference for select columns by setting
-the :member:`ConvertOptions::column_types` option.  Explicit data types
-can be chosen from the following list:
-
-* Null
-* All Integer types
-* Float32 and Float64
-* Decimal128
-* Boolean
-* Date32 and Date64
-* Timestamp
-* Binary and Large Binary
-* String and Large String (with optional UTF8 input validation)
-* Fixed-Size Binary
-* Dictionary with index type Int32 and value type one of the following:
-  Binary, String, LargeBinary, LargeString, Int32, UInt32, Int64, UInt64,
-  Float32, Float64, Decimal128
-
-Other data types do not support conversion from CSV values and will error out.
-
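-For example, a hedged sketch of overriding inference for two columns (the
-column names are illustrative only):
-
-.. code-block:: cpp
-
-   auto convert_options = arrow::csv::ConvertOptions::Defaults();
-   // Parse "id" as Int64 and "when" as a timestamp with seconds unit
-   convert_options.column_types["id"] = arrow::int64();
-   convert_options.column_types["when"] = arrow::timestamp(arrow::TimeUnit::SECOND);
-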
-Dictionary inference
---------------------
-
-If type inference is enabled and :member:`ConvertOptions::auto_dict_encode`
-is true, the CSV reader first tries to convert string-like columns to a
-dictionary-encoded string-like array.  It switches to a plain string-like
-array when the threshold in :member:`ConvertOptions::auto_dict_max_cardinality`
-is reached.
-
-Nulls
------
-
-Null values are recognized from the spellings stored in
-:member:`ConvertOptions::null_values`.  The :func:`ConvertOptions::Defaults`
-factory method will initialize a number of conventional null spellings such
-as ``N/A``.
-
-Character encoding
-------------------
-
-CSV files are expected to be encoded in UTF8.  However, non-UTF8 data
-is accepted for Binary columns.
-
-Performance
-===========
-
-By default, the CSV reader will parallelize reads in order to exploit all
-CPU cores on your machine.  You can change this setting in
-:member:`ReadOptions::use_threads`.  A reasonable expectation is at least
-100 MB/s per core on a performant desktop or laptop computer (measured in
-source CSV bytes, not target Arrow data bytes).
diff --git a/docs/source/cpp/dataset.rst b/docs/source/cpp/dataset.rst
deleted file mode 100644
index be33e89..0000000
--- a/docs/source/cpp/dataset.rst
+++ /dev/null
@@ -1,403 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-================
-Tabular Datasets
-================
-
-.. seealso::
-   :doc:`Dataset API reference <api/dataset>`
-
-.. warning::
-
-    The ``arrow::dataset`` namespace is experimental, and a stable API
-    is not yet guaranteed.
-
-The Arrow Datasets library provides functionality to efficiently work with
-tabular, potentially larger-than-memory, multi-file datasets. This includes:
-
-* A unified interface that supports different sources and file formats (currently,
-  Parquet, Feather / Arrow IPC, and CSV files) and different file systems (local,
-  cloud).
-* Discovery of sources (crawling directories, handling partitioned datasets with
-  various partitioning schemes, basic schema normalization, ...)
-* Optimized reading with predicate pushdown (filtering rows), projection
-  (selecting and deriving columns), and optionally parallel reading.
-
-The goal is to expand support to other file formats and data sources
-(e.g. database connections) in the future.
-
-Reading Datasets
-----------------
-
-For the examples below, let's create a small dataset consisting
-of a directory with two parquet files:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 50-85
-   :linenos:
-   :lineno-match:
-
-(See the full example at bottom: :ref:`cpp-dataset-full-example`.)
-
-Dataset discovery
-~~~~~~~~~~~~~~~~~
-
-A :class:`arrow::dataset::Dataset` object can be created using the various
-:class:`arrow::dataset::DatasetFactory` objects. Here, we'll use the
-:class:`arrow::dataset::FileSystemDatasetFactory`, which can create a dataset
-given a base directory path:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 151-165
-   :emphasize-lines: 6-11
-   :linenos:
-   :lineno-match:
-
-We're also passing the filesystem to use and the file format to use for reading.
-This lets us choose between (for example) reading local files or files in Amazon
-S3, or between Parquet and CSV.
-
-In addition to searching a base directory, we can list file paths manually.
-
-Creating a :class:`arrow::dataset::Dataset` does not begin reading the data
-itself. It only crawls the directory to find all the files (if needed), which can
-be retrieved with :func:`arrow::dataset::FileSystemDataset::files`:
-
-.. code-block:: cpp
-
-   // Print out the files crawled (only for FileSystemDataset)
-   for (const auto& filename : dataset->files()) {
-     std::cout << filename << std::endl;
-   }
-
-…and infers the dataset's schema (by default from the first file):
-
-.. code-block:: cpp
-
-   std::cout << dataset->schema()->ToString() << std::endl;
-
-Using the :func:`arrow::dataset::Dataset::NewScan` method, we can build a
-:class:`arrow::dataset::Scanner` and read the dataset (or a portion of it) into
-a :class:`arrow::Table` with the :func:`arrow::dataset::Scanner::ToTable`
-method:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 151-170
-   :emphasize-lines: 16-19
-   :linenos:
-   :lineno-match:
-
-.. TODO: iterative loading not documented pending API changes
-.. note:: Depending on the size of your dataset, this can require a lot of
-          memory; see :ref:`cpp-dataset-filtering-data` below on
-          filtering/projecting.
-
-Reading different file formats
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The above examples use Parquet files on local disk, but the Dataset API
-provides a consistent interface across multiple file formats and filesystems.
-(See :ref:`cpp-dataset-cloud-storage` for more information on the latter.)
-Currently, Parquet, Feather / Arrow IPC, and CSV file formats are supported;
-more formats are planned in the future.
-
-If we save the table as Feather files instead of Parquet files:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 87-104
-   :linenos:
-   :lineno-match:
-
-…then we can read the Feather file by passing an :class:`arrow::dataset::IpcFileFormat`:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 318,334
-   :linenos:
-
-Customizing file formats
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-:class:`arrow::dataset::FileFormat` objects have properties that control how
-files are read. For example::
-
-  auto format = std::make_shared<ds::ParquetFileFormat>();
-  format->reader_options.dict_columns.insert("a");
-
-This will configure column ``"a"`` to be dictionary-encoded when read. Similarly,
-setting :member:`arrow::dataset::CsvFileFormat::parse_options` lets us change
-things like reading comma-separated or tab-separated data.
-
-Additionally, passing an :class:`arrow::dataset::FragmentScanOptions` to
-:func:`arrow::dataset::ScannerBuilder::FragmentScanOptions` offers fine-grained
-control over data scanning. For example, for CSV files, we can change what values
-are converted into Boolean true and false at scan time.
-
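-A hedged sketch, assuming a CSV-specific ``CsvFragmentScanOptions`` subclass
-with an embedded ``convert_options`` (the names are illustrative; consult the
-Dataset API reference for the exact types):
-
-.. code-block:: cpp
-
-   auto fragment_scan_options = std::make_shared<ds::CsvFragmentScanOptions>();
-   // Treat "yes"/"no" as Boolean true/false during the scan
-   fragment_scan_options->convert_options.true_values = {"yes"};
-   fragment_scan_options->convert_options.false_values = {"no"};
-   ARROW_RETURN_NOT_OK(scanner_builder->FragmentScanOptions(fragment_scan_options));
-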
-.. _cpp-dataset-filtering-data:
-
-Filtering data
---------------
-
-So far, we've been reading the entire dataset, but if we need only a subset of the
-data, this can waste time or memory reading data we don't need. The
-:class:`arrow::dataset::Scanner` offers control over what data to read.
-
-In this snippet, we use :func:`arrow::dataset::ScannerBuilder::Project` to select
-which columns to read:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 172-191
-   :emphasize-lines: 16
-   :linenos:
-   :lineno-match:
-
-Some formats, such as Parquet, can reduce I/O costs here by reading only the
-specified columns from the filesystem.
-
-A filter can be provided with :func:`arrow::dataset::ScannerBuilder::Filter`, so
-that rows which do not match the filter predicate will not be included in the
-returned table. Again, some formats, such as Parquet, can use this filter to
-reduce the amount of I/O needed.
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 172-191
-   :emphasize-lines: 17
-   :linenos:
-   :lineno-match:
-
-.. TODO Expressions not documented pending renamespacing
-
-Projecting columns
-------------------
-
-In addition to selecting columns, :func:`arrow::dataset::ScannerBuilder::Project`
-can also be used for more complex projections, such as renaming columns, casting
-them to other types, and even deriving new columns based on evaluating
-expressions.
-
-In this case, we pass a vector of expressions used to construct column values
-and a vector of names for the columns:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 193-223
-   :emphasize-lines: 18-28
-   :linenos:
-   :lineno-match:
-
-This also determines the column selection; only the given columns will be
-present in the resulting table. If you want to include a derived column in
-*addition* to the existing columns, you can build up the expressions from the
-dataset schema:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 225-254
-   :emphasize-lines: 17-27
-   :linenos:
-   :lineno-match:
-
-.. note:: When combining filters and projections, Arrow will determine all
-          necessary columns to read. For instance, if you filter on a column that
-          isn't ultimately selected, Arrow will still read the column to evaluate
-          the filter.
-
-Reading and writing partitioned data
-------------------------------------
-
-So far, we've been working with datasets consisting of flat directories of
-files. Oftentimes, a dataset will have one or more columns that are frequently
-filtered on. Instead of having to read and then filter the data, we can organize
-the files into a nested directory structure and define a partitioned dataset,
-where sub-directory names hold information about which subset of the data is
-stored in that directory. We can then filter data more efficiently by using that
-information to avoid loading files that don't match the filter.
-
-For example, a dataset partitioned by year and month may have the following layout:
-
-.. code-block:: text
-
-   dataset_name/
-     year=2007/
-       month=01/
-          data0.parquet
-          data1.parquet
-          ...
-       month=02/
-          data0.parquet
-          data1.parquet
-          ...
-       month=03/
-       ...
-     year=2008/
-       month=01/
-       ...
-     ...
-
-The above partitioning scheme uses "/key=value/" directory names, as found in
-Apache Hive. Under this convention, the file at
-``dataset_name/year=2007/month=01/data0.parquet`` contains only data for which
-``year == 2007`` and ``month == 01``.
-
-Let's create a small partitioned dataset. For this, we'll use Arrow's dataset
-writing functionality.
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 106-149
-   :emphasize-lines: 25-42
-   :linenos:
-   :lineno-match:
-
-The above created a directory with two subdirectories ("part=a" and "part=b"),
-and the Parquet files written in those directories no longer include the "part"
-column.
-
-Reading this dataset, we now specify that the dataset should use a Hive-like
-partitioning scheme:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 256-279
-   :emphasize-lines: 7,9-11
-   :linenos:
-   :lineno-match:
-
-Although the partition fields are not included in the actual Parquet files,
-they will be added back to the resulting table when scanning this dataset:
-
-.. code-block:: text
-
-   $ ./debug/dataset_documentation_example file:///tmp parquet_hive partitioned
-   Found fragment: /tmp/parquet_dataset/part=a/part0.parquet
-   Partition expression: (part == "a")
-   Found fragment: /tmp/parquet_dataset/part=b/part1.parquet
-   Partition expression: (part == "b")
-   Read 20 rows
-   a: int64
-     -- field metadata --
-     PARQUET:field_id: '1'
-   b: double
-     -- field metadata --
-     PARQUET:field_id: '2'
-   c: int64
-     -- field metadata --
-     PARQUET:field_id: '3'
-   part: string
-   ----
-   # snip...
-
-We can now filter on the partition keys, which avoids loading files
-altogether if they do not match the filter:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 281-301
-   :emphasize-lines: 15-18
-   :linenos:
-   :lineno-match:
-
-Different partitioning schemes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The above example uses a Hive-like directory scheme, such as "/year=2009/month=11/day=15".
-We specified this by passing the Hive partitioning factory. In this case, the types of
-the partition keys are inferred from the file paths.
-
-It is also possible to directly construct the partitioning and explicitly define
-the schema of the partition keys. For example:
-
-.. code-block:: cpp
-
-    auto part = std::make_shared<ds::HivePartitioning>(arrow::schema({
-        arrow::field("year", arrow::int16()),
-        arrow::field("month", arrow::int8()),
-        arrow::field("day", arrow::int32())
-    }));
-
-Arrow supports another partitioning scheme, "directory partitioning", where the
-segments in the file path represent the values of the partition keys without
-including the name (the field names are implicit in the segment's index). For
-example, given field names "year", "month", and "day", one path might be
-"/2019/11/15".
-
-Since the names are not included in the file paths, these must be specified
-when constructing a directory partitioning:
-
-.. code-block:: cpp
-
-    auto part = ds::DirectoryPartitioning::MakeFactory({"year", "month", "day"});
-
-Directory partitioning also supports providing a full schema rather than inferring
-types from file paths.
-
-Reading from other data sources
--------------------------------
-
-Reading in-memory data
-~~~~~~~~~~~~~~~~~~~~~~
-
-If you already have data in memory that you'd like to use with the Datasets API
-(e.g. to filter/project data, or to write it out to a filesystem), you can wrap it
-in an :class:`arrow::dataset::InMemoryDataset`:
-
-.. code-block:: cpp
-
-   auto table = arrow::Table::FromRecordBatches(...);
-   auto dataset = std::make_shared<arrow::dataset::InMemoryDataset>(std::move(table));
-   // Scan the dataset, filter it, etc.
-   auto scanner_builder = dataset->NewScan();
-
-Earlier in this example, we used an InMemoryDataset to write our sample data to
-local disk, which was then used in the rest of the example:
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :lines: 106-149
-   :emphasize-lines: 24-28
-   :linenos:
-   :lineno-match:
-
-.. _cpp-dataset-cloud-storage:
-
-Reading from cloud storage
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In addition to local files, Arrow Datasets also support reading from cloud
-storage systems, such as Amazon S3, by passing a different filesystem.
-
-See the :ref:`filesystem <cpp-filesystems>` docs for more details on the available
-filesystems.
-
-.. _cpp-dataset-full-example:
-
-Full Example
-------------
-
-.. literalinclude:: ../../../cpp/examples/arrow/dataset_documentation_example.cc
-   :language: cpp
-   :linenos:
diff --git a/docs/source/cpp/datatypes.rst b/docs/source/cpp/datatypes.rst
deleted file mode 100644
index 9149420..0000000
--- a/docs/source/cpp/datatypes.rst
+++ /dev/null
@@ -1,68 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-Data Types
-==========
-
-.. seealso::
-   :doc:`Datatype API reference <api/datatype>`.
-
-Data types govern how physical data is interpreted.  Their :ref:`specification
-<format_columnar>` allows binary interoperability between different Arrow
-implementations, including from different programming languages and runtimes
-(for example it is possible to access the same data, without copying, from
-both Python and Java using the :py:mod:`pyarrow.jvm` bridge module).
-
-Information about a data type in C++ can be represented in three ways:
-
-1. Using a :class:`arrow::DataType` instance (e.g. as a function argument)
-2. Using a :class:`arrow::DataType` concrete subclass (e.g. as a template
-   parameter)
-3. Using a :type:`arrow::Type::type` enum value (e.g. as the condition of
-   a switch statement)
-
-The first form (using a :class:`arrow::DataType` instance) is the most idiomatic
-and flexible.  Runtime-parametric types can only be fully represented with
-a DataType instance.  For example, a :class:`arrow::TimestampType` needs to be
-constructed at runtime with a :type:`arrow::TimeUnit::type` parameter; a
-:class:`arrow::Decimal128Type` with *scale* and *precision* parameters;
-a :class:`arrow::ListType` with a full child type (itself a
-:class:`arrow::DataType` instance).
-
-The two other forms can be used where performance is critical, in order to
-avoid paying the price of dynamic typing and polymorphism.  However, some
-amount of runtime switching can still be required for parametric types.
-It is not possible to reify all possible types at compile time, since Arrow
-data types allow arbitrary nesting.
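-
-As an illustration of the third form, one can dispatch on the
-:type:`arrow::Type::type` enum; a minimal sketch (the helper function is
-hypothetical)::
-
-   std::string Describe(const arrow::DataType& type) {
-     switch (type.id()) {
-       case arrow::Type::INT16:
-         return "16-bit integer";
-       case arrow::Type::TIMESTAMP:
-         return "timestamp";
-       default:
-         return "some other type";
-     }
-   }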
-
-Creating data types
--------------------
-
-To instantiate data types, it is recommended to call the provided
-:ref:`factory functions <api-type-factories>`::
-
-   std::shared_ptr<arrow::DataType> type;
-
-   // A 16-bit integer type
-   type = arrow::int16();
-   // A 64-bit timestamp type (with microsecond granularity)
-   type = arrow::timestamp(arrow::TimeUnit::MICRO);
-   // A list type of single-precision floating-point values
-   type = arrow::list(arrow::float32());
diff --git a/docs/source/cpp/examples/cmake_minimal_build.rst b/docs/source/cpp/examples/cmake_minimal_build.rst
deleted file mode 100644
index f135de8..0000000
--- a/docs/source/cpp/examples/cmake_minimal_build.rst
+++ /dev/null
@@ -1,28 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-Minimal build using CMake
-==========================
-
-The folder ``cpp/examples/minimal_build/`` located inside the source tree
-contains a Docker-based example of building and using Arrow from a
-third-party project, using CMake.  The
-`README <https://github.com/apache/arrow/tree/master/cpp/examples/minimal_build/README.md>`_
-file in that folder has more information.
diff --git a/docs/source/cpp/examples/dataset_documentation_example.rst b/docs/source/cpp/examples/dataset_documentation_example.rst
deleted file mode 100644
index 2bc993f..0000000
--- a/docs/source/cpp/examples/dataset_documentation_example.rst
+++ /dev/null
@@ -1,27 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-Arrow Datasets example
-=========================
-
-The file ``cpp/examples/arrow/dataset_documentation_example.cc``
-located inside the source tree contains an example of using Arrow
-Datasets to read, write, select, and filter data. :doc:`../dataset`
-has a full walkthrough of the example.
diff --git a/docs/source/cpp/examples/index.rst b/docs/source/cpp/examples/index.rst
deleted file mode 100644
index d365960..0000000
--- a/docs/source/cpp/examples/index.rst
+++ /dev/null
@@ -1,27 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-Examples
-========
-
-.. toctree::
-   :maxdepth: 1
-
-   cmake_minimal_build
-   dataset_documentation_example
-   row_columnar_conversion
-   std::tuple-like ranges to Arrow <tuple_range_conversion>
diff --git a/docs/source/cpp/examples/row_columnar_conversion.rst b/docs/source/cpp/examples/row_columnar_conversion.rst
deleted file mode 100644
index 3f45864..0000000
--- a/docs/source/cpp/examples/row_columnar_conversion.rst
+++ /dev/null
@@ -1,27 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-Row to columnar conversion
-==========================
-
-The following example converts an array of structs to a :class:`arrow::Table`
-instance, and then converts it back to the original array of structs.
-
-.. literalinclude:: ../../../../cpp/examples/arrow/row_wise_conversion_example.cc
diff --git a/docs/source/cpp/examples/tuple_range_conversion.rst b/docs/source/cpp/examples/tuple_range_conversion.rst
deleted file mode 100644
index 64ba237..0000000
--- a/docs/source/cpp/examples/tuple_range_conversion.rst
+++ /dev/null
@@ -1,106 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-Conversion of range of ``std::tuple``-like to ``Table`` instances
-=================================================================
-
-While the previous example shows a fairly manual approach to row-to-columnar
-conversion, Arrow also provides some template logic to convert ranges of
-``std::tuple<..>``-like objects to tables.
-
-In the simplest case, you only need to provide the input data; the type
-conversion is then inferred at compile time.
-
-.. code::
-
-   std::vector<std::tuple<double, std::string>> rows = ..
-   // Column names, one per tuple element (hypothetical names for this sketch)
-   std::vector<std::string> names = {"price", "label"};
-   std::shared_ptr<Table> table;
-
-   if (!arrow::stl::TableFromTupleRange(
-         arrow::default_memory_pool(),
-         rows, names, &table).ok()
-   ) {
-     // Error handling code should go here.
-   }
-
-In reverse, you can use ``TupleRangeFromTable`` to fill an already
-pre-allocated range with the data from a ``Table`` instance.
-
-.. code::
-
-    // An important aspect here is that the table columns need to be in the
-    // same order as the columns will later appear in the tuple. As the tuple
-    // is unnamed, matching is done on positions.
-    std::shared_ptr<Table> table = ..
-
-    // The range needs to be pre-allocated to the respective amount of rows.
-    // This allows us to pass in an arbitrary range object, not only
-    // `std::vector`.
-    std::vector<std::tuple<double, std::string>> rows(2);
-    if (!arrow::stl::TupleRangeFromTable(*table, &rows).ok()) {
-      // Error handling code should go here.
-    }
-
-Arrow itself already supports some C and C++ data types for this conversion. If you
-want to support additional data types, you need to implement a specialization
-of ``arrow::stl::ConversionTraits<T>`` and the more general
-``arrow::CTypeTraits<T>``.
-
-
-.. code::
-
-    namespace arrow {
-
-    template<>
-    struct CTypeTraits<boost::posix_time::ptime> {
-      using ArrowType = ::arrow::TimestampType;
-
-      static std::shared_ptr<::arrow::DataType> type_singleton() {
-        return ::arrow::timestamp(::arrow::TimeUnit::MICRO);
-      }
-    };
-
-    }
-
-    namespace arrow { namespace stl {
-
-    template <>
-    struct ConversionTraits<boost::posix_time::ptime> : public CTypeTraits<boost::posix_time::ptime> {
-      constexpr static bool nullable = false;
-
-      // This is the specialization to load a scalar value into an Arrow builder.
-      static Status AppendRow(
-            typename TypeTraits<TimestampType>::BuilderType& builder,
-            boost::posix_time::ptime cell) {
-        boost::posix_time::ptime const epoch({1970, 1, 1}, {0, 0, 0, 0});
-        return builder.Append((cell - epoch).total_microseconds());
-      }
-
-      // Specify how we can fill the tuple from the values stored in the Arrow
-      // array.
-      static boost::posix_time::ptime GetEntry(
-            const TimestampArray& array, size_t j) {
-        // Use the same Unix epoch as in AppendRow; values are microseconds
-        boost::posix_time::ptime const epoch({1970, 1, 1}, {0, 0, 0, 0});
-        return epoch + boost::posix_time::microseconds(array.Value(j));
-      }
-    };
-
-    }}
-
diff --git a/docs/source/cpp/flight.rst b/docs/source/cpp/flight.rst
deleted file mode 100644
index c1d2e43..0000000
--- a/docs/source/cpp/flight.rst
+++ /dev/null
@@ -1,119 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-================
-Arrow Flight RPC
-================
-
-Arrow Flight is an RPC framework for efficient transfer of Arrow data
-over the network. See :doc:`../format/Flight` for full details on
-the protocol, or :doc:`./api/flight` for API docs.
-
-Writing a Flight Service
-========================
-
-Servers are subclasses of :class:`arrow::flight::FlightServerBase`. To
-implement individual RPCs, override the RPC methods on this class.
-
-.. code-block:: cpp
-
-   class MyFlightServer : public FlightServerBase {
-     Status ListFlights(const ServerCallContext& context, const Criteria* criteria,
-                        std::unique_ptr<FlightListing>* listings) override {
-       std::vector<FlightInfo> flights = ...;
-       *listings = std::unique_ptr<FlightListing>(new SimpleFlightListing(flights));
-       return Status::OK();
-     }
-   };
-
-Each RPC method always takes a
-:class:`arrow::flight::ServerCallContext` for common parameters and
-returns a :class:`arrow::Status` to indicate success or
-failure. Flight-specific error codes can be returned via
-:func:`arrow::flight::MakeFlightError`.
-
-RPC methods that return a value in addition to a status will use an
-out parameter, as shown above. Often, there are helper classes
-providing basic implementations of these out parameters. For instance,
-above, :class:`arrow::flight::SimpleFlightListing` uses a vector of
-:class:`arrow::flight::FlightInfo` objects as the result of a
-``ListFlights`` RPC.
-
-To start a server, create a :class:`arrow::flight::Location` to
-specify where to listen, and call
-:func:`arrow::flight::FlightServerBase::Init`. This will start the
-server, but won't block the rest of the program. Use
-:func:`arrow::flight::FlightServerBase::SetShutdownOnSignals` to
-enable stopping the server if an interrupt signal is received, then
-call :func:`arrow::flight::FlightServerBase::Serve` to block until the
-server stops.
-
-.. code-block:: cpp
-
-   // Instantiate the server subclass defined above
-   std::unique_ptr<arrow::flight::FlightServerBase> server(new MyFlightServer());
-   arrow::flight::Location location;
-   // Listen to all interfaces on a free port
-   ARROW_CHECK_OK(arrow::flight::Location::ForGrpcTcp("0.0.0.0", 0, &location));
-   arrow::flight::FlightServerOptions options(location);
-
-   // Start the server
-   ARROW_CHECK_OK(server->Init(options));
-   // Exit with a clean error code (0) on SIGTERM
-   ARROW_CHECK_OK(server->SetShutdownOnSignals({SIGTERM}));
-
-   std::cout << "Server listening on localhost:" << server->port() << std::endl;
-   ARROW_CHECK_OK(server->Serve());
-
-
-Enabling TLS and Authentication
--------------------------------
-
-TLS can be enabled by providing a certificate and key pair to
-:func:`FlightServerBase::Init
-<arrow::flight::FlightServerBase::Init>`. Additionally, use
-:func:`Location::ForGrpcTls <arrow::flight::Location::ForGrpcTls>` to
-construct the :class:`arrow::flight::Location` to listen on.
-
-Similarly, authentication can be enabled by providing an
-implementation of :class:`ServerAuthHandler
-<arrow::flight::ServerAuthHandler>`. Authentication consists of two
-parts: on initial client connection, the server and client
-authentication implementations can perform any negotiation needed;
-then, on each RPC thereafter, the client provides a token. The server
-authentication handler validates the token and provides the identity
-of the client. This identity can be obtained from the
-:class:`arrow::flight::ServerCallContext`.
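-
-A hedged sketch of enabling TLS (the PEM strings are assumed to be loaded
-elsewhere, and ``server`` is the instance constructed above):
-
-.. code-block:: cpp
-
-   arrow::flight::Location location;
-   ARROW_CHECK_OK(arrow::flight::Location::ForGrpcTls("0.0.0.0", 0, &location));
-   arrow::flight::FlightServerOptions options(location);
-   // cert_pem and key_pem hold the PEM-encoded certificate and private key
-   options.tls_certificates.push_back(
-       arrow::flight::CertKeyPair{cert_pem, key_pem});
-   ARROW_CHECK_OK(server->Init(options));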
-
-Using the Flight Client
-=======================
-
-To connect to a Flight service, create an instance of
-:class:`arrow::flight::FlightClient` by calling :func:`Connect
-<arrow::flight::FlightClient::Connect>`. This takes a Location and
-returns the client through an out parameter. To authenticate, call
-:func:`Authenticate <arrow::flight::FlightClient::Authenticate>` with
-the desired client authentication implementation.
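-
-A minimal connection sketch (the host and port are hypothetical):
-
-.. code-block:: cpp
-
-   arrow::flight::Location location;
-   ARROW_CHECK_OK(
-       arrow::flight::Location::ForGrpcTcp("localhost", 5005, &location));
-
-   std::unique_ptr<arrow::flight::FlightClient> client;
-   ARROW_CHECK_OK(arrow::flight::FlightClient::Connect(location, &client));
-   // The client can now issue RPCs such as ListFlights or DoGet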
-
-Each RPC method returns :class:`arrow::Status` to indicate the
-success/failure of the request. Any other return values are specified
-through out parameters. They also take an optional :class:`options
-<arrow::flight::FlightCallOptions>` parameter that allows specifying a
-timeout for the call.
diff --git a/docs/source/cpp/getting_started.rst b/docs/source/cpp/getting_started.rst
deleted file mode 100644
index d6cfb17..0000000
--- a/docs/source/cpp/getting_started.rst
+++ /dev/null
@@ -1,40 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-User Guide
-==========
-
-.. toctree::
-
-   overview
-   conventions
-   cmake
-   memory
-   arrays
-   datatypes
-   tables
-   compute
-   io
-   ipc
-   parquet
-   csv
-   json
-   dataset
-   flight
diff --git a/docs/source/cpp/index.rst b/docs/source/cpp/index.rst
deleted file mode 100644
index b3f6e4c..0000000
--- a/docs/source/cpp/index.rst
+++ /dev/null
@@ -1,32 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-C++ Implementation
-==================
-
-.. toctree::
-   :maxdepth: 2
-
-   getting_started
-   Examples <examples/index>
-   api
-
-.. TODO add "topics" chapter
-.. - nested arrays
-.. - dictionary encoding
-
-.. TODO add "building" or "development" chapter
diff --git a/docs/source/cpp/io.rst b/docs/source/cpp/io.rst
deleted file mode 100644
index 6e1d261..0000000
--- a/docs/source/cpp/io.rst
+++ /dev/null
@@ -1,87 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-.. cpp:namespace:: arrow::io
-
-==============================
-Input / output and filesystems
-==============================
-
-Arrow provides a range of C++ interfaces abstracting the concrete details
-of input / output operations.  They operate on streams of untyped binary data.
-Those abstractions are used for various purposes such as reading CSV or
-Parquet data, transmitting IPC streams, and more.
-
-.. seealso::
-   :doc:`API reference for input/output facilities <api/io>`.
-
-Reading binary data
-===================
-
-Interfaces for reading binary data come in two flavours:
-
-* Sequential reading: the :class:`InputStream` interface provides
-  ``Read`` methods; it is recommended to ``Read`` to a ``Buffer`` as it
-  may in some cases avoid a memory copy.
-
-* Random access reading: the :class:`RandomAccessFile` interface
-  provides additional facilities for positioning and, most importantly,
-  the ``ReadAt`` methods which allow parallel reading from multiple threads.
-
-Concrete implementations are available for :class:`in-memory reads <BufferReader>`,
-:class:`unbuffered file reads <ReadableFile>`,
-:class:`memory-mapped file reads <MemoryMappedFile>`,
-:class:`buffered reads <BufferedInputStream>`,
-and :class:`compressed reads <CompressedInputStream>`.
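-
-For example, a sequential read into a buffer might look like this (the file
-name is hypothetical)::
-
-   auto maybe_file = arrow::io::ReadableFile::Open("data.bin");
-   if (!maybe_file.ok()) {
-      // ... handle the file opening error
-   }
-   std::shared_ptr<arrow::io::ReadableFile> file = *maybe_file;
-
-   // Read the first 4096 bytes into an arrow::Buffer
-   arrow::Result<std::shared_ptr<arrow::Buffer>> maybe_buffer = file->Read(4096);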
-
-Writing binary data
-===================
-
-Writing binary data is mostly done through the :class:`OutputStream`
-interface.
-
-Concrete implementations are available for :class:`in-memory writes <BufferOutputStream>`,
-:class:`unbuffered file writes <FileOutputStream>`,
-:class:`memory-mapped file writes <MemoryMappedFile>`,
-:class:`buffered writes <BufferedOutputStream>`,
-and :class:`compressed writes <CompressedOutputStream>`.
-
-.. cpp:namespace:: arrow::fs
-
-.. _cpp-filesystems:
-
-Filesystems
-===========
-
-The :class:`filesystem interface <FileSystem>` allows abstracted access over
-various data storage backends such as the local filesystem or an S3 bucket.
-It provides input and output streams as well as directory operations.
-
-The filesystem interface exposes a simplified view of the underlying data
-storage.  Data paths are represented as *abstract paths*, which are
-``/``-separated, even on Windows, and shouldn't include special path
-components such as ``.`` and ``..``.  Symbolic links, if supported by the
-underlying storage, are automatically dereferenced.  Only basic
-:class:`metadata <FileStats>` about file entries, such as the file size
-and modification time, is made available.
-
-Concrete implementations are available for
-:class:`local filesystem access <LocalFileSystem>`,
-:class:`HDFS <HadoopFileSystem>` and
-:class:`Amazon S3-compatible storage <S3FileSystem>`.
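-
-For instance, a local file can be opened through the generic interface (the
-path is hypothetical)::
-
-   arrow::fs::LocalFileSystem fs;
-   arrow::Result<std::shared_ptr<arrow::io::InputStream>> maybe_stream =
-       fs.OpenInputStream("/tmp/data.bin");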
diff --git a/docs/source/cpp/ipc.rst b/docs/source/cpp/ipc.rst
deleted file mode 100644
index ce4175b..0000000
--- a/docs/source/cpp/ipc.rst
+++ /dev/null
@@ -1,75 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-.. cpp:namespace:: arrow::ipc
-
-========================================
-Reading and writing the Arrow IPC format
-========================================
-
-.. seealso::
-   :ref:`Arrow IPC format specification <format-ipc>`.
-
-   :doc:`API reference for IPC readers and writers <api/ipc>`.
-
-Arrow C++ provides readers and writers for the Arrow IPC format which wrap
-lower level input/output, handled through the :doc:`IO interfaces <io>`.
-For reading, there is also an event-driven API that enables feeding
-arbitrary data into the IPC decoding layer asynchronously.
-
-Reading IPC streams and files
-=============================
-
-Synchronous reading
--------------------
-
-For most cases, it is most convenient to use the :class:`RecordBatchStreamReader`
-or :class:`RecordBatchFileReader` class, depending on which variant of the IPC
-format you want to read.  The former requires a :class:`~arrow::io::InputStream`
-source, while the latter requires a :class:`~arrow::io::RandomAccessFile`.
-
-Reading Arrow IPC data is inherently zero-copy if the source allows it.
-For example, a :class:`~arrow::io::BufferReader` or :class:`~arrow::io::MemoryMappedFile`
-can typically be zero-copy.  Exceptions are when the data must be transformed
-on the fly, e.g. when buffer compression has been enabled on the IPC stream
-or file.
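-
-A minimal sketch of opening a stream reader (the input stream is obtained
-elsewhere)::
-
-   std::shared_ptr<arrow::io::InputStream> input = ...;
-
-   auto maybe_reader = arrow::ipc::RecordBatchStreamReader::Open(input);
-   if (!maybe_reader.ok()) {
-      // ... handle the error
-   }
-   std::shared_ptr<arrow::RecordBatchReader> reader = *maybe_reader;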
-
-Event-driven reading
---------------------
-
-When it is necessary to process the IPC format without blocking (for example
-to integrate Arrow with an event loop), or if data is coming from an unusual
-source, use the event-driven :class:`StreamDecoder`.  You will need to define
-a subclass of :class:`Listener` and implement the virtual methods for the
-desired events (for example, implement :func:`Listener::OnRecordBatchDecoded`
-to be notified of each incoming :class:`RecordBatch`).
-
-Writing IPC streams and files
-=============================
-
-Use one of the factory functions, :func:`MakeStreamWriter` or
-:func:`MakeFileWriter`, to obtain a :class:`RecordBatchWriter` instance for
-the given IPC format variant.
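-
-For example (the sink and schema are assumed to exist)::
-
-   std::shared_ptr<arrow::io::OutputStream> sink = ...;
-   std::shared_ptr<arrow::Schema> schema = ...;
-
-   auto maybe_writer = arrow::ipc::MakeStreamWriter(sink, schema);
-   if (!maybe_writer.ok()) {
-      // ... handle the error
-   }
-   std::shared_ptr<arrow::ipc::RecordBatchWriter> writer = *maybe_writer;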
-
-Configuring
-===========
-
-Various aspects of reading and writing the IPC format can be configured
-using the :class:`IpcReadOptions` and :class:`IpcWriteOptions` classes,
-respectively.
diff --git a/docs/source/cpp/json.rst b/docs/source/cpp/json.rst
deleted file mode 100644
index cdb742e..0000000
--- a/docs/source/cpp/json.rst
+++ /dev/null
@@ -1,128 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-.. cpp:namespace:: arrow::json
-
-==================
-Reading JSON files
-==================
-
-Arrow allows reading line-separated JSON files as Arrow tables.  Each
-independent JSON object in the input file is converted to a row in
-the target Arrow table.
-
-.. seealso::
-   :ref:`JSON reader API reference <cpp-api-json>`.
-
-Basic usage
-===========
-
-A JSON file is read from a :class:`~arrow::io::InputStream`.
-
-.. code-block:: cpp
-
-   #include "arrow/json/api.h"
-
-   {
-      // ...
-      arrow::Status st;
-      arrow::MemoryPool* pool = arrow::default_memory_pool();
-      std::shared_ptr<arrow::io::InputStream> input = ...;
-
-      auto read_options = arrow::json::ReadOptions::Defaults();
-      auto parse_options = arrow::json::ParseOptions::Defaults();
-
-      // Instantiate TableReader from input stream and options
-      std::shared_ptr<arrow::json::TableReader> reader;
-      st = arrow::json::TableReader::Make(pool, input, read_options,
-                                          parse_options, &reader);
-      if (!st.ok()) {
-         // Handle TableReader instantiation error...
-      }
-
-      std::shared_ptr<arrow::Table> table;
-      // Read table from JSON file
-      st = reader->Read(&table);
-      if (!st.ok()) {
-         // Handle JSON read error
-         // (for example a JSON syntax error or failed type conversion)
-      }
-   }
-
-Data types
-==========
-
-Since JSON values are typed, the possible Arrow data types on output
-depend on the input value types.  Top-level JSON values should always be
-objects.  The fields of top-level objects are taken to represent columns
-in the Arrow data.  For each name/value pair in a JSON object, there are
-two possible modes of deciding the output data type:
-
-* if the name is in :class:`ConvertOptions::explicit_schema`,
-  conversion of the JSON value to the corresponding Arrow data type is
-  attempted;
-
-* otherwise, the Arrow data type is determined via type inference on
-  the JSON value, trying out a number of Arrow data types in order.
-
-The following tables show the possible combinations for each of those
-two modes.
-
-.. table:: Explicit conversions from JSON to Arrow
-   :align: center
-
-   +-----------------+----------------------------------------------------+
-   | JSON value type | Allowed Arrow data types                           |
-   +=================+====================================================+
-   | Null            | Any (including Null)                               |
-   +-----------------+----------------------------------------------------+
-   | Number          | All Integer types, Float32, Float64,               |
-   |                 | Date32, Date64, Time32, Time64                     |
-   +-----------------+----------------------------------------------------+
-   | Boolean         | Boolean                                            |
-   +-----------------+----------------------------------------------------+
-   | String          | Binary, LargeBinary, String, LargeString,          |
-   |                 | Timestamp                                          |
-   +-----------------+----------------------------------------------------+
-   | Array           | List                                               |
-   +-----------------+----------------------------------------------------+
-   | Object (nested) | Struct                                             |
-   +-----------------+----------------------------------------------------+
-
-.. table:: Implicit type inference from JSON to Arrow
-   :align: center
-
-   +-----------------+----------------------------------------------------+
-   | JSON value type | Inferred Arrow data types (in order)               |
-   +=================+====================================================+
-   | Null            | Null, any other                                    |
-   +-----------------+----------------------------------------------------+
-   | Number          | Int64, Float64                                     |
-   |                 |                                                    |
-   +-----------------+----------------------------------------------------+
-   | Boolean         | Boolean                                            |
-   +-----------------+----------------------------------------------------+
-   | String          | Timestamp (with seconds unit), String              |
-   |                 |                                                    |
-   +-----------------+----------------------------------------------------+
-   | Array           | List                                               |
-   +-----------------+----------------------------------------------------+
-   | Object (nested) | Struct                                             |
-   +-----------------+----------------------------------------------------+
diff --git a/docs/source/cpp/memory.rst b/docs/source/cpp/memory.rst
deleted file mode 100644
index 415a3ae..0000000
--- a/docs/source/cpp/memory.rst
+++ /dev/null
@@ -1,185 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-=================
-Memory Management
-=================
-
-.. seealso::
-   :doc:`Memory management API reference <api/memory>`
-
-Buffers
-=======
-
-To avoid passing around raw data pointers with varying and non-obvious
-lifetime rules, Arrow provides a generic abstraction called :class:`arrow::Buffer`.
-A Buffer encapsulates a pointer and data size, and generally also ties its
-lifetime to that of an underlying provider (in other words, a Buffer should
-*always* point to valid memory till its destruction).  Buffers are untyped:
-they simply denote a physical memory area regardless of its intended meaning
-or interpretation.
-
-Buffers may be allocated by Arrow itself, or by third-party routines.
-For example, it is possible to pass the data of a Python bytestring as an Arrow
-buffer, keeping the Python object alive as necessary.
-
-In addition, buffers come in various flavours: mutable or not, resizable or
-not.  Generally, you will hold a mutable buffer when building up a piece
-of data, then it will be frozen as an immutable container such as an
-:doc:`array <arrays>`.
-
-.. note::
-   Some buffers may point to non-CPU memory, such as GPU-backed memory
-   provided by a CUDA context.  If you're writing a GPU-aware application,
-   you will need to be careful not to interpret a GPU memory pointer as
-   a CPU-reachable pointer, or vice-versa.
-
-Accessing Buffer Memory
------------------------
-
-Buffers provide fast access to the underlying memory using the
-:func:`~arrow::Buffer::size` and :func:`~arrow::Buffer::data` accessors
-(or :func:`~arrow::Buffer::mutable_data` for writable access to a mutable
-buffer).
-
-Slicing
--------
-
-It is possible to make zero-copy slices of buffers, to obtain a buffer
-referring to some contiguous subset of the underlying data.  This is done
-by calling the :func:`arrow::SliceBuffer` and :func:`arrow::SliceMutableBuffer`
-functions.
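-
-For example, to obtain a zero-copy view of 48 bytes starting at offset 16::
-
-   std::shared_ptr<arrow::Buffer> whole = ...;
-   std::shared_ptr<arrow::Buffer> slice = arrow::SliceBuffer(whole, 16, 48);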
-
-Allocating a Buffer
--------------------
-
-You can allocate a buffer yourself by calling one of the
-:func:`arrow::AllocateBuffer` or :func:`arrow::AllocateResizableBuffer`
-overloads::
-
-   arrow::Result<std::unique_ptr<Buffer>> maybe_buffer = arrow::AllocateBuffer(4096);
-   if (!maybe_buffer.ok()) {
-      // ... handle allocation error
-   }
-
-   std::shared_ptr<arrow::Buffer> buffer = *std::move(maybe_buffer);
-   uint8_t* buffer_data = buffer->mutable_data();
-   memcpy(buffer_data, "hello world", 11);
-
-Allocating a buffer this way ensures it is 64-byte aligned and padded
-as recommended by the :doc:`Arrow memory specification <../format/Layout>`.
-
-Building a Buffer
------------------
-
-You can also allocate *and* build a Buffer incrementally, using the
-:class:`arrow::BufferBuilder` API::
-
-   BufferBuilder builder;
-   builder.Resize(11);
-   builder.Append("hello ", 6);
-   builder.Append("world", 5);
-
-   std::shared_ptr<arrow::Buffer> buffer;
-   if (!builder.Finish(&buffer).ok()) {
-      // ... handle buffer allocation error
-   }
-
-Memory Pools
-============
-
-When allocating a Buffer using the Arrow C++ API, the buffer's underlying
-memory is allocated by a :class:`arrow::MemoryPool` instance.  Usually this
-will be the process-wide *default memory pool*, but many Arrow APIs allow
-you to pass another MemoryPool instance for their internal allocations.
-
-Memory pools are used for large long-lived data such as array buffers.
-Other data, such as small C++ objects and temporary workspaces, usually
-goes through the regular C++ allocators.
-
-Default Memory Pool
--------------------
-
-The default memory pool depends on how Arrow C++ was compiled:
-
-- if enabled at compile time, a `jemalloc <http://jemalloc.net/>`_ heap;
-- otherwise, if enabled at compile time, a
-  `mimalloc <https://github.com/microsoft/mimalloc>`_ heap;
-- otherwise, the C library ``malloc`` heap.
-
-Overriding the Default Memory Pool
-----------------------------------
-
-One can override the above selection algorithm by setting the
-``ARROW_DEFAULT_MEMORY_POOL`` environment variable to one of the following
-values: ``jemalloc``, ``mimalloc`` or ``system``.  This variable is inspected
-once when Arrow C++ is loaded in memory (for example when the Arrow C++ DLL
-is loaded).
-
-STL Integration
----------------
-
-If you wish to use an Arrow memory pool to allocate the data of STL containers,
-you can do so using the :class:`arrow::stl::allocator` wrapper.
-
-Conversely, you can also use an STL allocator to allocate Arrow memory,
-using the :class:`arrow::stl::STLMemoryPool` class.  However, this may be less
-performant, as STL allocators don't provide a resizing operation.
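-
-A sketch of allocating a ``std::vector``'s storage from the default Arrow
-memory pool::
-
-   arrow::stl::allocator<int64_t> alloc(arrow::default_memory_pool());
-   std::vector<int64_t, arrow::stl::allocator<int64_t>> values(alloc);
-   values.resize(100);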
-
-Devices
-=======
-
-Many Arrow applications only access host (CPU) memory.  However, in some cases
-it is desirable to handle on-device memory (such as on-board memory on a GPU)
-as well as host memory.
-
-Arrow represents the CPU and other devices using the
-:class:`arrow::Device` abstraction.  The associated class
-:class:`arrow::MemoryManager` specifies how to allocate memory on a given
-device.  Each device has a default memory manager, but additional instances
-may be constructed (for example, wrapping a custom
-:class:`arrow::MemoryPool` on the CPU).
-
-Device-Agnostic Programming
----------------------------
-
-If you receive a Buffer from third-party code, you can query whether it is
-CPU-readable by calling its :func:`~arrow::Buffer::is_cpu` method.
-
-You can also view the Buffer on a given device, in a generic way, by calling
-:func:`arrow::Buffer::View` or :func:`arrow::Buffer::ViewOrCopy`.  This will
-be a no-operation if the source and destination devices are identical.
-Otherwise, a device-dependent mechanism will attempt to construct a memory
-address for the destination device that gives access to the buffer contents.
-Actual device-to-device transfer may happen lazily, when reading the buffer
-contents.
-
-Similarly, if you want to do I/O on a buffer without assuming a CPU-readable
-buffer, you can call :func:`arrow::Buffer::GetReader` and
-:func:`arrow::Buffer::GetWriter`.
-
-For example, to get an on-CPU view or copy of an arbitrary buffer, you can
-simply do::
-
-   std::shared_ptr<arrow::Buffer> arbitrary_buffer = ... ;
-   std::shared_ptr<arrow::Buffer> cpu_buffer = arrow::Buffer::ViewOrCopy(
-      arbitrary_buffer, arrow::default_cpu_memory_manager());
diff --git a/docs/source/cpp/overview.rst b/docs/source/cpp/overview.rst
deleted file mode 100644
index ccebdba..0000000
--- a/docs/source/cpp/overview.rst
+++ /dev/null
@@ -1,97 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-High-Level Overview
-===================
-
-The Arrow C++ library is made up of several parts, each of which serves
-a specific purpose.
-
-The physical layer
-------------------
-
-**Memory management** abstractions provide a uniform API over memory that
-may be allocated through various means, such as heap allocation, the memory
-mapping of a file or a static memory area.  In particular, the **buffer**
-abstraction represents a contiguous area of physical data.
-
-The one-dimensional layer
--------------------------
-
-**Data types** govern the *logical* interpretation of *physical* data.
-Many operations in Arrow are parameterized, at compile time or at runtime,
-by a data type.
-
-**Arrays** assemble one or several buffers with a data type, allowing them
-to be viewed as a logically contiguous sequence of values (possibly nested).
-
-**Chunked arrays** are a generalization of arrays, combining several same-type
-arrays into a longer logical sequence of values.
-
-The two-dimensional layer
--------------------------
-
-**Schemas** describe a logical collection of several pieces of data,
-each with a distinct name and type, and optional metadata.
-
-**Tables** are collections of chunked arrays in accordance with a schema. They
-are the most capable dataset-providing abstraction in Arrow.
-
-**Record batches** are collections of contiguous arrays, described
-by a schema.  They allow incremental construction or serialization of tables.
-
-The compute layer
------------------
-
-**Datums** are flexible dataset references, able to hold, for example, an
-array or table reference.
-
-**Kernels** are specialized computation functions running in a loop over a
-given set of datums representing input and output parameters to the functions.
-
-The IO layer
-------------
-
-**Streams** allow untyped sequential or seekable access over external data
-of various kinds (for example compressed or memory-mapped).
-
-The Inter-Process Communication (IPC) layer
--------------------------------------------
-
-A **messaging format** allows interchange of Arrow data between processes, using
-as few copies as possible.
-
-The file formats layer
-----------------------
-
-Reading and writing Arrow data from/to various file formats is possible, for
-example **Parquet**, **CSV**, **Orc** or the Arrow-specific **Feather** format.
-
-The devices layer
------------------
-
-Basic **CUDA** integration is provided, allowing Arrow data backed by
-GPU-allocated memory to be described.
-
-The filesystem layer
---------------------
-
-A filesystem abstraction allows reading and writing data from different storage
-backends, such as the local filesystem or an S3 bucket.
diff --git a/docs/source/cpp/parquet.rst b/docs/source/cpp/parquet.rst
deleted file mode 100644
index d69bf1c..0000000
--- a/docs/source/cpp/parquet.rst
+++ /dev/null
@@ -1,432 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-.. cpp:namespace:: parquet
-
-=================================
-Reading and writing Parquet files
-=================================
-
-.. seealso::
-   :ref:`Parquet reader and writer API reference <cpp-api-parquet>`.
-
-The `Parquet format <https://parquet.apache.org/documentation/latest/>`__
-is a space-efficient columnar storage format for complex data.  The Parquet
-C++ implementation is part of the Apache Arrow project and benefits
-from tight integration with the Arrow C++ classes and facilities.
-
-Supported Parquet features
-==========================
-
-The Parquet format has many features, and Parquet C++ supports a subset of them.
-
-Page types
-----------
-
-+-------------------+---------+
-| Page type         | Notes   |
-+===================+=========+
-| DATA_PAGE         |         |
-+-------------------+---------+
-| DATA_PAGE_V2      |         |
-+-------------------+---------+
-| DICTIONARY_PAGE   |         |
-+-------------------+---------+
-
-*Unsupported page type:* INDEX_PAGE. When reading a Parquet file, pages of
-this type are ignored.
-
-Compression
------------
-
-+-------------------+---------+
-| Compression codec | Notes   |
-+===================+=========+
-| SNAPPY            |         |
-+-------------------+---------+
-| GZIP              |         |
-+-------------------+---------+
-| BROTLI            |         |
-+-------------------+---------+
-| LZ4               | \(1)    |
-+-------------------+---------+
-| ZSTD              |         |
-+-------------------+---------+
-
-* \(1) On the read side, Parquet C++ is able to decompress both the regular
-  LZ4 block format and the ad-hoc Hadoop LZ4 format used by the
-  `reference Parquet implementation <https://github.com/apache/parquet-mr>`__.
-  On the write side, Parquet C++ always generates the ad-hoc Hadoop LZ4 format.
-
-*Unsupported compression codec:* LZO.
-
-Encodings
----------
-
-+--------------------------+---------+
-| Encoding                 | Notes   |
-+==========================+=========+
-| PLAIN                    |         |
-+--------------------------+---------+
-| PLAIN_DICTIONARY         |         |
-+--------------------------+---------+
-| BIT_PACKED               |         |
-+--------------------------+---------+
-| RLE                      | \(1)    |
-+--------------------------+---------+
-| RLE_DICTIONARY           | \(2)    |
-+--------------------------+---------+
-| BYTE_STREAM_SPLIT        |         |
-+--------------------------+---------+
-
-* \(1) Only supported for encoding definition and repetition levels, not values.
-
-* \(2) On the write path, RLE_DICTIONARY is only enabled if Parquet format version
-  2.0 (or potentially greater) is selected in :func:`WriterProperties::version`.
-
-*Unsupported encodings:* DELTA_BINARY_PACKED, DELTA_LENGTH_BYTE_ARRAY,
-DELTA_BYTE_ARRAY.
-
-Types
------
-
-Physical types
-~~~~~~~~~~~~~~
-
-+--------------------------+-------------------------+------------+
-| Physical type            | Mapped Arrow type       | Notes      |
-+==========================+=========================+============+
-| BOOLEAN                  | Boolean                 |            |
-+--------------------------+-------------------------+------------+
-| INT32                    | Int32 / other           | \(1)       |
-+--------------------------+-------------------------+------------+
-| INT64                    | Int64 / other           | \(1)       |
-+--------------------------+-------------------------+------------+
-| INT96                    | Timestamp (nanoseconds) | \(2)       |
-+--------------------------+-------------------------+------------+
-| FLOAT                    | Float32                 |            |
-+--------------------------+-------------------------+------------+
-| DOUBLE                   | Float64                 |            |
-+--------------------------+-------------------------+------------+
-| BYTE_ARRAY               | Binary / other          | \(1) \(3)  |
-+--------------------------+-------------------------+------------+
-| FIXED_LENGTH_BYTE_ARRAY  | FixedSizeBinary / other | \(1)       |
-+--------------------------+-------------------------+------------+
-
-* \(1) Can be mapped to other Arrow types, depending on the logical type
-  (see below).
-
-* \(2) On the write side, :func:`ArrowWriterProperties::support_deprecated_int96_timestamps`
-  must be enabled.
-
-* \(3) On the write side, an Arrow LargeBinary can also be mapped to BYTE_ARRAY.
-
-Logical types
-~~~~~~~~~~~~~
-
-Specific logical types can override the default Arrow type mapping for a given
-physical type.
-
-+-------------------+-----------------------------+----------------------------+---------+
-| Logical type      | Physical type               | Mapped Arrow type          | Notes   |
-+===================+=============================+============================+=========+
-| NULL              | Any                         | Null                       | \(1)    |
-+-------------------+-----------------------------+----------------------------+---------+
-| INT               | INT32                       | Int8 / UInt8 / Int16 /     |         |
-|                   |                             | UInt16 / Int32 / UInt32    |         |
-+-------------------+-----------------------------+----------------------------+---------+
-| INT               | INT64                       | Int64 / UInt64             |         |
-+-------------------+-----------------------------+----------------------------+---------+
-| DECIMAL           | INT32 / INT64 / BYTE_ARRAY  | Decimal128 / Decimal256    | \(2)    |
-|                   | / FIXED_LENGTH_BYTE_ARRAY   |                            |         |
-+-------------------+-----------------------------+----------------------------+---------+
-| DATE              | INT32                       | Date32                     | \(3)    |
-+-------------------+-----------------------------+----------------------------+---------+
-| TIME              | INT32                       | Time32 (milliseconds)      |         |
-+-------------------+-----------------------------+----------------------------+---------+
-| TIME              | INT64                       | Time64 (micro- or          |         |
-|                   |                             | nanoseconds)               |         |
-+-------------------+-----------------------------+----------------------------+---------+
-| TIMESTAMP         | INT64                       | Timestamp (milli-, micro-  |         |
-|                   |                             | or nanoseconds)            |         |
-+-------------------+-----------------------------+----------------------------+---------+
-| STRING            | BYTE_ARRAY                  | Utf8                       | \(4)    |
-+-------------------+-----------------------------+----------------------------+---------+
-| LIST              | Any                         | List                       | \(5)    |
-+-------------------+-----------------------------+----------------------------+---------+
-| MAP               | Any                         | Map                        | \(6)    |
-+-------------------+-----------------------------+----------------------------+---------+
-
-* \(1) On the write side, the Parquet physical type INT32 is generated.
-
-* \(2) On the write side, a FIXED_LENGTH_BYTE_ARRAY is always emitted.
-
-* \(3) On the write side, an Arrow Date64 is also mapped to a Parquet DATE INT32.
-
-* \(4) On the write side, an Arrow LargeUtf8 is also mapped to a Parquet STRING.
-
-* \(5) On the write side, an Arrow LargeList or FixedSizedList is also mapped to
-  a Parquet LIST.
-
-* \(6) On the read side, a key with multiple values does not get deduplicated,
-  in contradiction with the
-  `Parquet specification <https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#maps>`__.
-
-*Unsupported logical types:* JSON, BSON, UUID.  If such a type is encountered
-when reading a Parquet file, the default physical type mapping is used (for
-example, a Parquet JSON column may be read as Arrow Binary or FixedSizeBinary).
-
-Converted types
-~~~~~~~~~~~~~~~
-
-While converted types are deprecated in the Parquet format (they are superseded
-by logical types), they are recognized and emitted by the Parquet C++
-implementation so as to maximize compatibility with other Parquet
-implementations.
-
-Special cases
-~~~~~~~~~~~~~
-
-An Arrow Extension type is written out as its storage type.  It can still
-be recreated at read time using Parquet metadata (see "Roundtripping Arrow
-types" below).
-
-An Arrow Dictionary type is written out as its value type.  It can still
-be recreated at read time using Parquet metadata (see "Roundtripping Arrow
-types" below).
-
-Roundtripping Arrow types
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-While there is no bijection between Arrow types and Parquet types, it is
-possible to serialize the Arrow schema as part of the Parquet file metadata.
-This is enabled using :func:`ArrowWriterProperties::store_schema`.
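-
-A sketch of enabling this when constructing the Arrow-specific writer
-properties (the writer setup itself is elided):
-
-.. code-block:: cpp
-
-   std::shared_ptr<parquet::ArrowWriterProperties> arrow_props =
-       parquet::ArrowWriterProperties::Builder().store_schema()->build();
-
-   // Pass arrow_props as the ArrowWriterProperties argument of
-   // parquet::arrow::WriteTable or parquet::arrow::FileWriter::Open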
-
-On the read path, the serialized schema will be automatically recognized
-and will recreate the original Arrow data, converting the Parquet data as
-required (for example, a LargeList will be recreated from the Parquet LIST
-type).
-
-As an example, when serializing an Arrow LargeList to Parquet:
-
-* The data is written out as a Parquet LIST
-
-* When read back, the Parquet LIST data is decoded as an Arrow LargeList if
-  :func:`ArrowWriterProperties::store_schema` was enabled when writing the file;
-  otherwise, it is decoded as an Arrow List.
-
-Serialization details
-"""""""""""""""""""""
-
-The Arrow schema is serialized as a :ref:`Arrow IPC <format-ipc>` schema message,
-then base64-encoded and stored under the ``ARROW:schema`` metadata key in
-the Parquet file metadata.
-
-Limitations
-~~~~~~~~~~~
-
-Writing or reading back FixedSizedList data with null entries is not supported.
-
-Encryption
-----------
-
-Parquet C++ implements all features specified in the
-`encryption specification <https://github.com/apache/parquet-format/blob/master/Encryption.md>`__,
-except for encryption of column index and bloom filter modules. 
-
-More specifically, Parquet C++ supports:
-
-* AES_GCM_V1 and AES_GCM_CTR_V1 encryption algorithms.
-* AAD suffix for Footer, ColumnMetaData, Data Page, Dictionary Page,
-  Data PageHeader, Dictionary PageHeader module types. Other module types
-  (ColumnIndex, OffsetIndex, BloomFilter Header, BloomFilter Bitset) are not
-  supported.
-* EncryptionWithFooterKey and EncryptionWithColumnKey modes.
-* Encrypted Footer and Plaintext Footer modes.
-
-
-Reading Parquet files
-=====================
-
-The :class:`arrow::FileReader` class reads data for an entire
-file or row group into an :class:`::arrow::Table`.
-
-The :class:`StreamReader` and :class:`StreamWriter` classes allow
-data to be read and written using a C++ input/output streams approach,
-processing fields column by column and row by row.  This approach is
-offered for ease of use and type safety.  It is of course also useful
-when data must be streamed as files are read and written
-incrementally.
-
-Please note that the performance of the :class:`StreamReader` and
-:class:`StreamWriter` classes will not be as good due to the type
-checking and the fact that column values are processed one at a time.
-
-FileReader
-----------
-
-The Parquet :class:`arrow::FileReader` requires a
-:class:`::arrow::io::RandomAccessFile` instance representing the input
-file.
-
-.. code-block:: cpp
-
-   #include "arrow/parquet/arrow/reader.h"
-
-   {
-      // ...
-      arrow::Status st;
-      arrow::MemoryPool* pool = arrow::default_memory_pool();
-      std::shared_ptr<arrow::io::RandomAccessFile> input = ...;
-
-      // Open Parquet file reader
-      std::unique_ptr<parquet::arrow::FileReader> arrow_reader;
-      st = parquet::arrow::OpenFile(input, pool, &arrow_reader);
-      if (!st.ok()) {
-         // Handle error instantiating file reader...
-      }
-
-      // Read entire file as a single Arrow table
-      std::shared_ptr<arrow::Table> table;
-      st = arrow_reader->ReadTable(&table);
-      if (!st.ok()) {
-         // Handle error reading Parquet data...
-      }
-   }
-
-Finer-grained options are available through the
-:class:`arrow::FileReaderBuilder` helper class.
-
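-For example, a minimal sketch using :class:`arrow::FileReaderBuilder` to
-enable multi-threaded column decoding (reusing ``input`` and ``pool`` from
-the example above; the option shown is just one of several):
-
-.. code-block:: cpp
-
-   parquet::arrow::FileReaderBuilder builder;
-   PARQUET_THROW_NOT_OK(builder.Open(input));
-
-   parquet::ArrowReaderProperties arrow_props;
-   arrow_props.set_use_threads(true);  // decode columns in parallel
-
-   std::unique_ptr<parquet::arrow::FileReader> arrow_reader;
-   PARQUET_THROW_NOT_OK(
-      builder.memory_pool(pool)->properties(arrow_props)->Build(&arrow_reader));
-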
-.. TODO write section about performance and memory efficiency
-
-StreamReader
-------------
-
-The :class:`StreamReader` allows for Parquet files to be read using
-standard C++ input operators which ensures type-safety.
-
-Please note that types must match the schema exactly i.e. if the
-schema field is an unsigned 16-bit integer then you must supply a
-uint16_t type.
-
-Exceptions are used to signal errors.  A :class:`ParquetException` is
-thrown in the following circumstances:
-
-* Attempt to read field by supplying the incorrect type.
-
-* Attempt to read beyond end of row.
-
-* Attempt to read beyond end of file.
-
-.. code-block:: cpp
-
-   #include "arrow/io/file.h"
-   #include "parquet/stream_reader.h"
-
-   {
-      std::shared_ptr<arrow::io::ReadableFile> infile;
-
-      PARQUET_ASSIGN_OR_THROW(
-         infile,
-         arrow::io::ReadableFile::Open("test.parquet"));
-
-      parquet::StreamReader stream{parquet::ParquetFileReader::Open(infile)};
-
-      std::string article;
-      float price;
-      uint32_t quantity;
-
-      while ( !stream.eof() )
-      {
-         // Each extracted field must match the schema type exactly
-         stream >> article >> price >> quantity >> parquet::EndRow;
-         // ...
-      }
-   }
-
-Writing Parquet files
-=====================
-
-WriteTable
-----------
-
-The :func:`arrow::WriteTable` function writes an entire
-:class:`::arrow::Table` to an output file.
-
-.. code-block:: cpp
-
-   #include "parquet/arrow/writer.h"
-
-   {
-      std::shared_ptr<arrow::io::FileOutputStream> outfile;
-      PARQUET_ASSIGN_OR_THROW(
-         outfile,
-         arrow::io::FileOutputStream::Open("test.parquet"));
-
-      // `table` is an existing std::shared_ptr<arrow::Table>; the final
-      // argument is the maximum number of rows per row group (chunk size)
-      PARQUET_THROW_NOT_OK(
-         parquet::arrow::WriteTable(*table, arrow::default_memory_pool(), outfile, 3));
-   }
-
-StreamWriter
-------------
-
-The :class:`StreamWriter` allows for Parquet files to be written using
-standard C++ output operators.  This type-safe approach also ensures
-that rows are written without omitting fields and allows for new row
-groups to be created automatically (after a certain volume of data) or
-explicitly by using the :type:`EndRowGroup` stream modifier, as sketched
-after the example below.
-
-Exceptions are used to signal errors.  A :class:`ParquetException` is
-thrown in the following circumstances:
-
-* Attempt to write a field using an incorrect type.
-
-* Attempt to write too many fields in a row.
-
-* Attempt to skip a required field.
-
-.. code-block:: cpp
-
-   #include "arrow/io/file.h"
-   #include "parquet/stream_writer.h"
-
-   {
-      std::shared_ptr<arrow::io::FileOutputStream> outfile;
-
-      PARQUET_ASSIGN_OR_THROW(
-         outfile,
-         arrow::io::FileOutputStream::Open("test.parquet"));
-
-      parquet::WriterProperties::Builder builder;
-      std::shared_ptr<parquet::schema::GroupNode> schema;
-
-      // Set up builder with required compression type etc.
-      // Define schema.
-      // ...
-
-      parquet::StreamWriter os{
-         parquet::ParquetFileWriter::Open(outfile, schema, builder.build())};
-
-      // Loop over some data structure which provides the required
-      // fields to be written and write each row.
-      for (const auto& a : getArticles())
-      {
-         os << a.name() << a.price() << a.quantity() << parquet::EndRow;
-      }
-   }
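-
-Row groups can also be managed explicitly.  As a small sketch continuing the
-example above, a maximum row group size (in bytes) can be set, or the current
-row group can be closed by hand with the :type:`EndRowGroup` modifier:
-
-.. code-block:: cpp
-
-   os.SetMaxRowGroupSize(512 * 1024);  // approximate row group size, in bytes
-
-   // ... write rows as above ...
-   os << parquet::EndRowGroup;  // or close the current row group explicitly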
diff --git a/docs/source/cpp/tables.rst b/docs/source/cpp/tables.rst
deleted file mode 100644
index ea91987..0000000
--- a/docs/source/cpp/tables.rst
+++ /dev/null
@@ -1,83 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. default-domain:: cpp
-.. highlight:: cpp
-
-============
-Tabular Data
-============
-
-.. seealso::
-   :doc:`Table and RecordBatch API reference <api/table>`.
-
-While arrays and chunked arrays represent a one-dimensional sequence of
-homogeneous values, data often comes in the form of two-dimensional sets of
-heterogeneous data (such as database tables, CSV files...).  Arrow provides
-several abstractions to handle such data conveniently and efficiently.
-
-Fields
-======
-
-Fields are used to denote the particular columns of a table (and also
-the particular members of a nested data type such as :class:`arrow::StructType`).
-A field, i.e. an instance of :class:`arrow::Field`, holds together a data
-type, a field name and some optional metadata.
-
-The recommended way to create a field is to call the :func:`arrow::field`
-factory function.
-
-Schemas
-=======
-
-A schema describes the overall structure of a two-dimensional dataset such
-as a table.  It holds a sequence of fields together with some optional
-schema-wide metadata (in addition to per-field metadata).  The recommended
-way to create a schema is to call one of the :func:`arrow::schema` factory
-function overloads::
-
-   // Create a schema describing datasets with two columns:
-   // a int32 column "A" and a utf8-encoded string column "B"
-   std::shared_ptr<arrow::Field> field_a, field_b;
-   std::shared_ptr<arrow::Schema> schema;
-
-   field_a = arrow::field("A", arrow::int32());
-   field_b = arrow::field("B", arrow::utf8());
-   schema = arrow::schema({field_a, field_b});
-
-Tables
-======
-
-A :class:`arrow::Table` is a two-dimensional dataset with chunked arrays for
-columns, together with a schema providing field names.  Also, each chunked
-column must have the same logical length in number of elements (although each
-column can be chunked in a different way).
-
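-Continuing the schema example above, a table can be created from one array
-per field (a minimal sketch; status checks are omitted for brevity)::
-
-   std::shared_ptr<arrow::Array> array_a, array_b;
-
-   // Build the column data; every column must have the same logical length
-   arrow::Int32Builder builder_a;
-   builder_a.AppendValues({1, 2, 3});
-   builder_a.Finish(&array_a);
-
-   arrow::StringBuilder builder_b;
-   builder_b.AppendValues({"x", "y", "z"});
-   builder_b.Finish(&array_b);
-
-   std::shared_ptr<arrow::Table> table =
-      arrow::Table::Make(schema, {array_a, array_b});
-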
-Record Batches
-==============
-
-A :class:`arrow::RecordBatch` is a two-dimensional dataset of a number of
-contiguous arrays, each the same length.  Like a table, a record batch also
-has a schema which must match its arrays' datatypes.
-
-Record batches are a convenient unit of work for various serialization
-and computation functions, possibly incremental.
-
-A table can be streamed as an arbitrary number of record batches using
-a :class:`arrow::TableBatchReader`.  Conversely, a logical sequence of
-record batches can be assembled to form a table using one of the
-:func:`arrow::Table::FromRecordBatches` factory function overloads.
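-
-For instance, streaming the table from the sketch above as record batches
-(error handling elided)::
-
-   arrow::TableBatchReader batch_reader(*table);
-   std::shared_ptr<arrow::RecordBatch> batch;
-
-   while (batch_reader.ReadNext(&batch).ok() && batch != nullptr) {
-      // Process each batch...
-   }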
diff --git a/docs/source/developers/archery.rst b/docs/source/developers/archery.rst
deleted file mode 100644
index c5a508d..0000000
--- a/docs/source/developers/archery.rst
+++ /dev/null
@@ -1,84 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _archery:
-
-Daily Development using Archery
-===============================
-
-To ease some of the daily development tasks, we developed Archery, a
-command-line utility written in Python.
-
-Installation
-------------
-
-Archery requires Python 3.6 or later. It is recommended to install archery in
-*editable* mode with the ``-e`` flag to automatically update the installation
-when pulling the Arrow repository.
-
-.. code:: bash
-
-   pip install -e dev/archery
-
-Usage
------
-
-You can inspect Archery usage by passing the ``--help`` flag:
-
-.. code:: bash
-
-   $ archery --help
-   Usage: archery [OPTIONS] COMMAND [ARGS]...
-
-     Apache Arrow developer utilities.
-
-     See sub-commands help with `archery <cmd> --help`.
-
-   Options:
-     --debug      Increase logging with debugging output.
-     --pdb        Invoke pdb on uncaught exception.
-     -q, --quiet  Silence executed commands.
-     --help       Show this message and exit.
-
-   Commands:
-     benchmark    Arrow benchmarking.
-     build        Initialize an Arrow C++ build
-     docker       Interact with docker-compose based builds.
-     integration  Execute protocol and Flight integration tests
-     lint         Check Arrow source tree for errors
-     numpydoc     Lint python docstring with NumpyDoc
-     trigger-bot
-
-Archery exposes independent subcommands, each of which provides dedicated
-help output, for example:
-
-.. code:: bash
-
-   $ archery docker --help
-   Usage: archery docker [OPTIONS] COMMAND [ARGS]...
-
-     Interact with docker-compose based builds.
-
-   Options:
-     --src <arrow_src>  Specify Arrow source directory.
-     --help             Show this message and exit.
-
-   Commands:
-     images  List the available docker-compose images.
-     push    Push the generated docker-compose image.
-     run     Execute docker-compose builds.
-
diff --git a/docs/source/developers/benchmarks.rst b/docs/source/developers/benchmarks.rst
deleted file mode 100644
index 22eb515..0000000
--- a/docs/source/developers/benchmarks.rst
+++ /dev/null
@@ -1,179 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _benchmarks:
-
-==========
-Benchmarks
-==========
-
-Setup
-=====
-
-First install the :ref:`Archery <archery>` utility to run the benchmark suite.
-
-Running the benchmark suite
-===========================
-
-The benchmark suites can be run with the ``benchmark run`` sub-command.
-
-.. code-block:: shell
-
-   # Run benchmarks in the current git workspace
-   archery benchmark run
-   # Storing the results in a file
-   archery benchmark run --output=run.json
-
-Sometimes it is necessary to pass custom CMake flags, e.g.
-
-.. code-block:: shell
-
-   export CC=clang-8 CXX=clang++8
-   archery benchmark run --cmake-extras="-DARROW_SIMD_LEVEL=NONE"
-
-Additionally, an existing CMake build directory may be specified.
-
-.. code-block:: shell
-
-   archery benchmark run $HOME/arrow/cpp/release-build
-
-Comparison
-==========
-
-One goal with benchmarking is to detect performance regressions. To this end,
-``archery`` implements a benchmark comparison facility via the ``benchmark
-diff`` sub-command.
-
-In the default invocation, it will compare the current source (known as the
-current workspace in git) with the local master branch:
-
-.. code-block:: shell
-
-  archery --quiet benchmark diff --benchmark-filter=FloatParsing
-  -----------------------------------------------------------------------------------
-  Non-regressions: (1)
-  -----------------------------------------------------------------------------------
-                 benchmark            baseline           contender  change % counters
-   FloatParsing<FloatType>  105.983M items/sec  105.983M items/sec       0.0       {}
-
-  ------------------------------------------------------------------------------------
-  Regressions: (1)
-  ------------------------------------------------------------------------------------
-                  benchmark            baseline           contender  change % counters
-   FloatParsing<DoubleType>  209.941M items/sec  109.941M items/sec   -47.632       {}
-
-For more information, invoke the ``archery benchmark diff --help`` command for
-multiple examples of invocation.
-
-Iterating efficiently
-~~~~~~~~~~~~~~~~~~~~~
-
-Iterating on benchmark development can be a tedious process due to long
-build and run times. Several tricks can be used with
-``archery benchmark diff`` to reduce this overhead.
-
-First, the benchmark command supports comparing existing build
-directories. This can be paired with the ``--preserve`` flag to
-avoid rebuilding sources from scratch.
-
-.. code-block:: shell
-
-   # The first invocation clones and checks out in a temporary directory.
-   # The directory is preserved with --preserve.
-   archery benchmark diff --preserve
-
-   # Modify C++ sources
-
-   # Re-run benchmark in the previously created build directory.
-   archery benchmark diff /tmp/arrow-bench*/{WORKSPACE,master}/build
-
-Second, a benchmark run result can be saved in a JSON file. This avoids not
-only rebuilding the sources, but also re-executing the (sometimes) heavy
-benchmarks. This technique can be used as a poor man's cache.
-
-.. code-block:: shell
-
-   # Run the benchmarks on a given commit and save the result
-   archery benchmark run --output=run-head-1.json HEAD~1
-   # Compare the previous captured result with HEAD
-   archery benchmark diff HEAD run-head-1.json
-
-Third, the benchmark command supports filtering suites (``--suite-filter``)
-and benchmarks (``--benchmark-filter``); both options support regular
-expressions.
-
-.. code-block:: shell
-
-   # Reuse a previous run, but only run benchmarks matching `Kernel`
-   # in suites matching `compute-aggregate`.
-   archery benchmark diff                                       \
-     --suite-filter=compute-aggregate --benchmark-filter=Kernel \
-     /tmp/arrow-bench*/{WORKSPACE,master}/build
-
-Instead of rerunning benchmarks on comparison, a JSON file (generated by
-``archery benchmark run``) may be specified for the contender and/or the
-baseline.
-
-.. code-block:: shell
-
-   archery benchmark run --output=baseline.json $HOME/arrow/cpp/release-build
-   git checkout some-feature
-   archery benchmark run --output=contender.json $HOME/arrow/cpp/release-build
-   archery benchmark diff contender.json baseline.json
-
-Regression detection
-====================
-
-Writing a benchmark
-~~~~~~~~~~~~~~~~~~~
-
-1. The benchmark command will filter (by default) benchmarks with the regular
-   expression ``^Regression``. This way, not all benchmarks are run by default.
-   Thus, if you want your benchmark to be verified for regression
-   automatically, the name must match.
-
-2. The benchmark command will run with the ``--benchmark_repetitions=K``
-   options for statistical significance. Thus, a benchmark should not override
-   the repetitions in the (C++) benchmark's arguments definition.
-
-3. Due to #2, a benchmark should run sufficiently fast. Often, when the input
-   does not fit in the CPU caches (L2/L3), the benchmark will be memory bound
-   instead of CPU bound. In this case, the input can be downsized.
-
-4. By default, Google's benchmark library uses the cputime metric, which is
-   the total runtime dedicated to the CPU across all threads of the process,
-   as opposed to realtime, which is the wall clock time (i.e. end_time -
-   start_time). In a single-threaded model, cputime is preferable since it is
-   less affected by context switching. In a multi-threaded scenario, cputime
-   gives misleading results since it is inflated by the number of threads and
-   can be far off from realtime. Thus, if the benchmark is multi-threaded, it
-   might be better to use ``UseRealTime()``, see this `example <https://github.com/apache/arrow/blob/a9582ea6ab2db055656809a2c579165fe6a811ba/cpp/src/arrow/io/memory-benchmark.cc#L223-L227>`_.
-   A short sketch putting these rules together is shown after this list.
-
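-A hypothetical regression benchmark might look like the following (a sketch:
-the name and the code it would exercise are stand-ins):
-
-.. code-block:: cpp
-
-   #include "benchmark/benchmark.h"
-
-   // The name matches ^Regression, so `archery benchmark diff` runs it by
-   // default
-   static void RegressionSumKernel(benchmark::State& state) {
-     for (auto _ : state) {
-       // ... exercise the code under test on a cache-sized input ...
-     }
-   }
-   // No ->Repetitions(...) here: archery supplies --benchmark_repetitions
-   BENCHMARK(RegressionSumKernel);
-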
-Scripting
-=========
-
-``archery`` is written as a Python library with a command-line frontend. The
-library can be imported to automate some tasks.
-
-Some invocations of the command line interface can be quite verbose due to
-build output. This can be avoided with the ``--quiet`` option, or the results
-can be saved to a file with ``--output=<file>``, e.g.
-
-.. code-block:: shell
-
-   archery benchmark diff --benchmark-filter=Kernel --output=compare.json ...
diff --git a/docs/source/developers/contributing.rst b/docs/source/developers/contributing.rst
deleted file mode 100644
index 9aecf8a..0000000
--- a/docs/source/developers/contributing.rst
+++ /dev/null
@@ -1,360 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _contributing:
-
-****************************
-Contributing to Apache Arrow
-****************************
-
-Thanks for your interest in the Apache Arrow project. Arrow is a large project
-and may seem overwhelming when you're first getting involved.
-Contributing code is great, but that's probably not the first place to start.
-There are lots of ways to make valuable contributions to the project and
-community.
-
-This page provides some orientation for how to get involved. It also offers
-some recommendations on how to get best results when engaging with the
-community.
-
-Code of Conduct
-===============
-
-All participation in the Apache Arrow project is governed by the ASF's
-`Code of Conduct <https://www.apache.org/foundation/policies/conduct.html>`_.
-
-Join the mailing lists
-======================
-
-A good first step to getting involved in the Arrow project is to join the
-mailing lists and participate in discussions where you can.
-Projects in The Apache Software Foundation ("the ASF") use public, archived
-mailing lists to create a public record of each project's development
-activities and decision-making process.
-While lacking the immediacy of chat or other forms of communication,
-the mailing lists give participants the opportunity to slow down and be
-thoughtful in their responses, and they help developers who are spread across
-many timezones to participate more equally.
-
-See `the community page <https://arrow.apache.org/community/>`_ for links to
-subscribe to the mailing lists and to view archives.
-
-Report bugs and propose features
-================================
-
-Using the software and sharing your experience is a very helpful contribution
-in itself. Those who actively develop Arrow need feedback from users on what
-works and what doesn't. Alerting us to unexpected behavior and missing features,
-even if you can't solve the problems yourself, helps us understand and prioritize
-work to improve the libraries.
-
-We use `JIRA <https://issues.apache.org/jira/projects/ARROW/issues>`_
-to manage our development "todo" list and to maintain changelogs for releases.
-In addition, the project's `Confluence site <https://cwiki.apache.org/confluence/display/ARROW>`_
-has some useful higher-level views of the JIRA issues.
-
-To create a JIRA issue, you'll need to have an account on the ASF JIRA, which
-you can `sign yourself up for <https://issues.apache.org/jira/secure/Signup!default.jspa>`_.
-The JIRA server hosts bugs and issues for multiple Apache projects. The JIRA
-project name for Arrow is "ARROW".
-
-You don't need any special permissions on JIRA to be able to create issues.
-Once you are more involved in the project and want to do more on JIRA, such as
-assign yourself an issue, you will need "Contributor" permissions on the
-Apache Arrow JIRA. To get this role, ask on the mailing list for a project
-maintainer's help.
-
-Tips for using JIRA
-+++++++++++++++++++
-
-Before you create a new issue, we recommend you first
-`search <https://issues.apache.org/jira/issues/?jql=project%20%3D%20ARROW%20AND%20resolution%20%3D%20Unresolved>`_
-among existing Arrow issues.
-
-When reporting a new issue, follow these conventions to help make sure the
-right people see it:
-
-* Use the **Component** field to indicate the area of the project that your
-  issue pertains to (for example "Python" or "C++").
-* Also prefix the issue title with the component name in brackets, for example
-  ``[Python] issue name`` ; this helps when navigating lists of open issues,
-  and it also makes our changelogs more readable. Most prefixes are exactly the 
-  same as the **Component** name, with the following exceptions:
-
-  * **Component:** Continuous Integration — **Summary prefix:** [CI]
-  * **Component:** Developer Tools — **Summary prefix:** [Dev]
-  * **Component:** Documentation — **Summary prefix:** [Docs]
-
-* If you're reporting something that used to work in a previous version
-  but doesn't work in the current release, you can add the "Affects version"
-  field. For feature requests and other proposals, "Affects version" isn't
-  appropriate.
-
-Project maintainers may later tweak formatting and labels to help improve their
-visibility. They may add a "Fix version" to indicate that they're considering
-it for inclusion in the next release, though adding that tag is not a
-commitment that it will be done in the next release.
-
-Tips for successful bug reports
-+++++++++++++++++++++++++++++++
-
-No one likes having bugs in their software, and in an ideal world, all bugs
-would get fixed as soon as they were reported. However, time and attention are
-finite, especially in an open-source project where most contributors are
-participating in their spare time. All contributors in Apache projects are
-volunteers and act as individuals, even if they are contributing to the project
-as part of their job responsibilities.
-
-In order for your bug to get prompt
-attention, there are things you can do to make it easier for contributors to
-reproduce and fix it.
-When you're reporting a bug, please help us understand the issue by providing,
-to the best of your ability,
-
-* Clear, minimal steps to reproduce the issue, with as few non-Arrow
-  dependencies as possible. If there's a problem on reading a file, try to
-  provide as small of an example file as possible, or code to create one.
-  If your bug report says "it crashes trying to read my file, but I can't
-  share it with you," it's really hard for us to debug.
-* Any relevant operating system, language, and library version information
-* If it isn't obvious, clearly state the expected behavior and what actually
-  happened.
-
-If a developer can't reproduce the problem in a failing unit test, they won't
-be able to confirm that the issue has been identified, or to know when it has
-been fixed.
-Try to anticipate the questions you might be asked by someone working to
-understand the issue and provide those supporting details up front.
-
-Other resources:
-
-* `Mozilla's bug-reporting guidelines <https://developer.mozilla.org/en-US/docs/Mozilla/QA/Bug_writing_guidelines>`_
-* `Reprex do's and don'ts <https://reprex.tidyverse.org/articles/reprex-dos-and-donts.html>`_
-
-Improve documentation
-=====================
-
-A great way to contribute to the project is to improve documentation. If you
-found some docs to be incomplete or inaccurate, share your hard-earned knowledge
-with the rest of the community.
-
-Documentation improvements are also a great way to gain some experience with
-our submission and review process, discussed below, without requiring a lot
-of local development environment setup. In fact, many documentation-only changes
-can be made directly in the GitHub web interface by clicking the "edit" button.
-This will handle making a fork and a pull request for you.
-
-Contribute code
-===============
-
-Code contributions, or "patches", are delivered in the form of GitHub pull
-requests against the `github.com/apache/arrow
-<https://github.com/apache/arrow>`_ repository.
-
-Before starting
-+++++++++++++++
-
-You'll first need to select a JIRA issue to work on. Perhaps you're working on
-one you reported yourself. Otherwise, if you're looking for something,
-`search <https://issues.apache.org/jira/issues/?jql=project%20%3D%20ARROW%20AND%20resolution%20%3D%20Unresolved>`_
-the open issues. Anything that's not in the "In Progress" state is fair game,
-even if it is "Assigned" to someone, particularly if it has not been
-recently updated. When in doubt, comment on the issue asking if they mind
-if you try to put together a pull request; interpret no response to mean that
-you're free to proceed.
-
-Please do ask questions, either on the JIRA itself or on the dev mailing list,
-if you have doubts about where to begin or what approach to take.
-This is particularly a good idea if this is your first code contribution,
-so you can get some sense of what the core developers in this part of the
-project think a good solution looks like. For best results, ask specific,
-direct questions, such as:
-
-* Do you think $PROPOSED_APPROACH is the right one?
-* In which file(s) should I be looking to make changes?
-* Is there anything related in the codebase I can look at to learn?
-
-If you ask these questions and do not get an answer, it is OK to ask again.
-
-Pull request and review
-+++++++++++++++++++++++
-
-To contribute a patch:
-
-* Submit the patch as a GitHub pull request against the master branch. For a
-  tutorial, see the GitHub guides on `forking a repo <https://help.github.com/en/articles/fork-a-repo>`_
-  and `sending a pull request <https://help.github.com/en/articles/creating-a-pull-request-from-a-fork>`_.
-  So that your pull request syncs with the JIRA issue, prefix your pull request
-  name with the JIRA issue id (ex:
-  `ARROW-767: [C++] Filesystem abstraction <https://github.com/apache/arrow/pull/4225>`_).
-* Give the pull request a clear, brief description: when the pull request is
-  merged, this will be retained in the extended commit message.
-* Make sure that your code passes the unit tests. You can find instructions how
-  to run the unit tests for each Arrow component in its respective README file.
-
-Core developers and others with a stake in the part of the project your change
-affects will review, request changes, and hopefully indicate their approval
-in the end. To make the review process smooth for everyone, try to
-
-* Break your work into small, single-purpose patches if possible. It’s much
-  harder to merge in a large change with a lot of disjoint features, and
-  particularly if you're new to the project, smaller changes are much easier
-  for maintainers to accept.
-* Add new unit tests for your code.
-* Follow the style guides for the part(s) of the project you're modifying.
-  Some languages (C++, Python, and Rust, for example) run a lint check in
-  continuous integration. For all languages, see their respective developer
-  documentation and READMEs for style guidance. In general, try to make it look
-  as if the codebase has a single author, and emulate any conventions you see,
-  whether or not they are officially documented or checked.
-
-When tests are passing and the pull request has been approved by the interested
-parties, a `committer <https://arrow.apache.org/committers/>`_
-will merge the pull request. This is done with a
-command-line utility that does a squash merge, so all of your commits will be
-registered as a single commit to the master branch; this simplifies the
-connection between JIRA issues and commits, makes it easier to bisect
-history to identify where changes were introduced, and helps us be able to
-cherry-pick individual patches onto a maintenance branch.
-
-A side effect of this way of
-merging is that your pull request will appear in the GitHub interface to have
-been "closed without merge". Do not be alarmed: if you look at the bottom, you
-will see a message that says ``@user closed this in $COMMIT``. In the commit
-message of that commit, the merge tool adds the pull request description, a
-link back to the pull request, and attribution to the contributor and any
-co-authors.
-
-Local git conventions
-+++++++++++++++++++++
-
-If you are tracking the Arrow source repository locally, here are some tips
-for using ``git``.
-
-All Arrow contributors work off of their personal fork of ``apache/arrow``
-and submit pull requests "upstream". Once you've cloned your fork of Arrow,
-be sure to::
-
-    $ git remote add upstream https://github.com/apache/arrow
-
-to set the "upstream" repository.
-
-You are encouraged to develop on branches, rather than your own "master" branch,
-and it helps to keep your fork's master branch synced with ``upstream/master``.
-
-To start a new branch, pull the latest from upstream first::
-
-   $ git fetch upstream
-   $ git checkout master
-   $ git pull --ff-only upstream master
-   $ git checkout -b $BRANCH
-
-It does not matter what you call your branch. Some people like to use the JIRA
-number as the branch name, while others use descriptive names.
-
-Once you have a branch going, you should sync with ``upstream/master``
-regularly, as many commits are merged to master every day.
-It is recommended to use ``git rebase`` rather than ``git merge``.
-To sync your local copy of a branch, you may do the following::
-
-    $ git pull upstream $BRANCH --rebase
-
-This will rebase your local commits on top of the tip of ``upstream/$BRANCH``.  In case
-there are conflicts, and your local commit history has multiple commits, you may
-simplify the conflict resolution process by squashing your local commits into a single
-commit. Preserving the commit history isn't as important because when your
-feature branch is merged upstream, a squash happens automatically.  If you choose this
-route, you can abort the rebase with::
-
-    $ git rebase --abort
-
-Following which, the local commits can be squashed interactively by running::
-
-    $ git rebase --interactive ORIG_HEAD~n
-
-Where ``n`` is the number of commits you have in your local branch.  After the squash,
-you can try the merge again, and this time conflict resolution should be relatively
-straightforward.
-
-If you set the following in your repo's ``.git/config``, the ``--rebase`` option can be
-omitted from the ``git pull`` command, as it is implied by default. ::
-
-    [pull]
-            rebase = true
-
-Once you have an updated local copy, you can push to your remote repo.  Note, since your
-remote repo still holds the old history, you would need to do a force push. ::
-
-    $ git push --force origin $BRANCH
-
-*Note about force pushing to a branch that is being reviewed:* if you want reviewers to
-look at your updates, please ensure you comment on the PR on GitHub as simply force
-pushing does not trigger a notification in the GitHub user interface.
-
-Also, once you have a pull request up, be sure you pull from ``origin``
-before rebasing and force-pushing. Arrow maintainers can push commits directly
-to your branch, which they sometimes do to help move a pull request along.
-In addition, the GitHub PR "suggestion" feature can also add commits to
-your branch, so it is possible that your local copy of your branch is missing
-some additions.
-
-Guidance for specific features
-==============================
-
-From time to time the community has discussions on specific types of features
-and improvements that they expect to support.  This section outlines decisions
-that have been made in this regard.
-
-Endianness
-++++++++++
-
-The Arrow format allows setting endianness.  Due to the popularity of
-little-endian architectures, most implementations assume little-endian by
-default. There has been some effort to support big-endian platforms as well.
-Based on a `mailing-list discussion
-<https://mail-archives.apache.org/mod_mbox/arrow-dev/202009.mbox/%3cCAK7Z5T--HHhr9Dy43PYhD6m-XoU4qoGwQVLwZsG-kOxXjPTyZA@mail.gmail.com%3e>`__,
-the requirements for a new platform are:
-
-1. A robust (non-flaky, returning results in a reasonable time) Continuous
-   Integration setup.
-2. Benchmarks for performance critical parts of the code to demonstrate
-   no regression.
-
-Furthermore, for big-endian support, there are two levels that an
-implementation can support:
-
-1. Native endianness (all Arrow communication happens with processes of the
-   same endianness).  This includes ancillary functionality such as reading
-   and writing various file formats, such as Parquet.
-2. Cross endian support (implementations will do byte reordering when
-   appropriate for :ref:`IPC <format-ipc>` and :ref:`Flight <flight-rpc>`
-   messages).
-
-The decision on what level to support is based on maintainers' preferences for
-complexity and technical risk.  In general all implementations should be open
-to native endianness support (provided the CI and performance requirements
-are met).  Cross endianness support is a question for individual maintainers.
-
-The current implementations aiming for cross endian support are:
-
-1. C++
-
-Implementations that do not intend to implement cross endian support:
-
-1. Java
-
-For other libraries, a consensus-gathering discussion should be held on the
-mailing list before submitting PRs.
diff --git a/docs/source/developers/cpp/building.rst b/docs/source/developers/cpp/building.rst
deleted file mode 100644
index dfaee0f..0000000
--- a/docs/source/developers/cpp/building.rst
+++ /dev/null
@@ -1,481 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _building-arrow-cpp:
-
-==================
-Building Arrow C++
-==================
-
-System setup
-============
-
-Arrow uses CMake as a build configuration system. We recommend building
-out-of-source. If you are not familiar with this terminology:
-
-* **In-source build**: ``cmake`` is invoked directly from the ``cpp``
-  directory. This can be inflexible when you wish to maintain multiple build
-  environments (e.g. one for debug builds and another for release builds)
-* **Out-of-source build**: ``cmake`` is invoked from another directory,
-  creating an isolated build environment that does not interact with any other
-  build environment. For example, you could create ``cpp/build-debug`` and
-  invoke ``cmake $CMAKE_ARGS ..`` from this directory
-
-Building requires:
-
-* A C++11-enabled compiler. On Linux, gcc 4.8 and higher should be
-  sufficient. For Windows, at least Visual Studio 2015 is required.
-* CMake 3.5 or higher
-* On Linux and macOS, either ``make`` or ``ninja`` build utilities
-
-On Ubuntu/Debian you can install the requirements with:
-
-.. code-block:: shell
-
-   sudo apt-get install \
-        build-essential \
-        cmake
-
-On Alpine Linux:
-
-.. code-block:: shell
-
-   apk add autoconf \
-           bash \
-           cmake \
-           g++ \
-           gcc \
-           make
-
-On macOS, you can use `Homebrew <https://brew.sh/>`_:
-
-.. code-block:: shell
-
-   git clone https://github.com/apache/arrow.git
-   cd arrow
-   brew update && brew bundle --file=cpp/Brewfile
-
-With `vcpkg <https://github.com/Microsoft/vcpkg>`_:
-
-.. code-block:: shell
-   
-   git clone https://github.com/apache/arrow.git
-   cd arrow
-   vcpkg install \
-     --x-manifest-root cpp \
-     --clean-after-build
-
-On MSYS2:
-
-.. code-block:: shell
-
-   pacman --sync --refresh --noconfirm \
-     ccache \
-     git \
-     mingw-w64-${MSYSTEM_CARCH}-boost \
-     mingw-w64-${MSYSTEM_CARCH}-brotli \
-     mingw-w64-${MSYSTEM_CARCH}-cmake \
-     mingw-w64-${MSYSTEM_CARCH}-gcc \
-     mingw-w64-${MSYSTEM_CARCH}-gflags \
-     mingw-w64-${MSYSTEM_CARCH}-glog \
-     mingw-w64-${MSYSTEM_CARCH}-gtest \
-     mingw-w64-${MSYSTEM_CARCH}-lz4 \
-     mingw-w64-${MSYSTEM_CARCH}-protobuf \
-     mingw-w64-${MSYSTEM_CARCH}-python3-numpy \
-     mingw-w64-${MSYSTEM_CARCH}-rapidjson \
-     mingw-w64-${MSYSTEM_CARCH}-snappy \
-     mingw-w64-${MSYSTEM_CARCH}-thrift \
-     mingw-w64-${MSYSTEM_CARCH}-zlib \
-     mingw-w64-${MSYSTEM_CARCH}-zstd
-
-Building
-========
-
-The build system uses ``CMAKE_BUILD_TYPE=release`` by default, so if this
-argument is omitted then a release build will be produced.
-
-.. note::
-
-   You need to pass more options to build on Windows. See
-   :ref:`developers-cpp-windows` for details.
-
-Minimal release build:
-
-.. code-block:: shell
-
-   git clone https://github.com/apache/arrow.git
-   cd arrow/cpp
-   mkdir release
-   cd release
-   cmake ..
-   make
-
-Minimal debug build with unit tests:
-
-.. code-block:: shell
-
-   git clone https://github.com/apache/arrow.git
-   cd arrow/cpp
-   mkdir debug
-   cd debug
-   cmake -DCMAKE_BUILD_TYPE=Debug -DARROW_BUILD_TESTS=ON ..
-   make unittest
-
-The unit tests are not built by default. After building, one can also invoke
-the unit tests using the ``ctest`` tool provided by CMake (note that the
-``test`` target depends on ``python`` being available).
-
-On some Linux distributions, running the test suite might require setting an
-explicit locale. If you see any locale-related errors, try setting the
-environment variable (which requires the ``locales`` package or equivalent):
-
-.. code-block:: shell
-
-   export LC_ALL="en_US.UTF-8"
-
-Faster builds with Ninja
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Many contributors use the `Ninja build system <https://ninja-build.org/>`_ to
-get faster builds. It especially speeds up incremental builds. To use
-``ninja``, pass ``-GNinja`` when calling ``cmake`` and then use the ``ninja``
-command instead of ``make``.
-
-Optional Components
-~~~~~~~~~~~~~~~~~~~
-
-By default, the C++ build system creates a fairly minimal build. We have
-several optional system components which you can opt into building by passing
-boolean flags to ``cmake``.
-
-* ``-DARROW_COMPUTE=ON``: Computational kernel functions and other support
-* ``-DARROW_CSV=ON``: CSV reader module
-* ``-DARROW_CUDA=ON``: CUDA integration for GPU development. Depends on NVIDIA
-  CUDA toolkit. The CUDA toolchain used to build the library can be customized
-  by using the ``$CUDA_HOME`` environment variable.
-* ``-DARROW_DATASET=ON``: Dataset API, implies the Filesystem API
-* ``-DARROW_FILESYSTEM=ON``: Filesystem API for accessing local and remote
-  filesystems
-* ``-DARROW_FLIGHT=ON``: Arrow Flight RPC system, which depends at least on
-  gRPC
-* ``-DARROW_GANDIVA=ON``: Gandiva expression compiler, depends on LLVM,
-  Protocol Buffers, and re2
-* ``-DARROW_GANDIVA_JAVA=ON``: Gandiva JNI bindings for Java
-* ``-DARROW_HDFS=ON``: Arrow integration with libhdfs for accessing the Hadoop
-  Filesystem
-* ``-DARROW_HIVESERVER2=ON``: Client library for HiveServer2 database protocol
-* ``-DARROW_JSON=ON``: JSON reader module
-* ``-DARROW_ORC=ON``: Arrow integration with Apache ORC
-* ``-DARROW_PARQUET=ON``: Apache Parquet libraries and Arrow integration
-* ``-DARROW_PLASMA=ON``: Plasma Shared Memory Object Store
-* ``-DARROW_PLASMA_JAVA_CLIENT=ON``: Build Java client for Plasma
-* ``-DARROW_PYTHON=ON``: Arrow Python C++ integration library (required for
-  building pyarrow). This library must be built against the same Python version
-  for which you are building pyarrow. NumPy must also be installed. Enabling
-  this option also enables ``ARROW_COMPUTE``, ``ARROW_CSV``, ``ARROW_DATASET``,
-  ``ARROW_FILESYSTEM``, ``ARROW_HDFS``, and ``ARROW_JSON``.
-* ``-DARROW_S3=ON``: Support for Amazon S3-compatible filesystems
-* ``-DARROW_WITH_BZ2=ON``: Build support for BZ2 compression
-* ``-DARROW_WITH_ZLIB=ON``: Build support for zlib (gzip) compression
-* ``-DARROW_WITH_LZ4=ON``: Build support for lz4 compression
-* ``-DARROW_WITH_SNAPPY=ON``: Build support for Snappy compression
-* ``-DARROW_WITH_ZSTD=ON``: Build support for ZSTD compression
-* ``-DARROW_WITH_BROTLI=ON``: Build support for Brotli compression
-
-Some features of the core Arrow shared library can be switched off for improved
-build times if they are not required for your application:
-
-* ``-DARROW_IPC=ON``: build the IPC extensions
-
-Optional Targets
-~~~~~~~~~~~~~~~~
-
-For development builds, you will often want to enable additional targets to
-exercise your changes, using the following ``cmake`` options.
-
-* ``-DARROW_BUILD_BENCHMARKS=ON``: Build executable benchmarks.
-* ``-DARROW_BUILD_EXAMPLES=ON``: Build examples of using the Arrow C++ API.
-* ``-DARROW_BUILD_INTEGRATION=ON``: Build additional executables that are
-  used to exercise protocol interoperability between the different Arrow
-  implementations.
-* ``-DARROW_BUILD_UTILITIES=ON``: Build executable utilities.
-* ``-DARROW_BUILD_TESTS=ON``: Build executable unit tests.
-* ``-DARROW_ENABLE_TIMING_TESTS=ON``: If building unit tests, enable those
-  unit tests that rely on wall-clock timing (this flag is disabled on CI
-  because it can make test results flaky).
-* ``-DARROW_FUZZING=ON``: Build fuzz targets and related executables.
-
-Optional Checks
-~~~~~~~~~~~~~~~
-
-The following special checks are available as well.  They instrument the
-generated code in various ways so as to detect select classes of problems
-at runtime (for example when executing unit tests).
-
-* ``-DARROW_USE_ASAN=ON``: Enable Address Sanitizer to check for memory leaks,
-  buffer overflows or other kinds of memory management issues.
-* ``-DARROW_USE_TSAN=ON``: Enable Thread Sanitizer to check for races in
-  multi-threaded code.
-* ``-DARROW_USE_UBSAN=ON``: Enable Undefined Behavior Sanitizer to check for
-  situations which trigger C++ undefined behavior.
-
-Some of those options are mutually incompatible, so you may have to build
-several times with different options if you want to exercise all of them.
-
-CMake version requirements
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-While we support CMake 3.5 and higher, some features require a newer version of
-CMake:
-
-* Building the benchmarks requires 3.6 or higher
-* Building zstd from source requires 3.7 or higher
-* Building Gandiva JNI bindings requires 3.11 or higher
-
-LLVM and Clang Tools
-~~~~~~~~~~~~~~~~~~~~
-
-We are currently using LLVM 8 for library builds and for other developer tools
-such as code formatting with ``clang-format``. LLVM can be installed via most
-modern package managers (apt, yum, conda, Homebrew, vcpkg, chocolatey).
-
-.. _cpp-build-dependency-management:
-
-Build Dependency Management
-===========================
-
-The build system supports a number of third-party dependencies:
-
-  * ``AWSSDK``: for S3 support, requires system cURL even when using the
-    ``BUNDLED`` method described below
-  * ``benchmark``: Google benchmark, for testing
-  * ``Boost``: for cross-platform support
-  * ``Brotli``: for data compression
-  * ``BZip2``: for data compression
-  * ``c-ares``: a dependency of gRPC
-  * ``gflags``: for command line utilities (formerly Googleflags)
-  * ``GLOG``: for logging
-  * ``gRPC``: for remote procedure calls
-  * ``GTest``: Googletest, for testing
-  * ``LLVM``: a dependency of Gandiva
-  * ``Lz4``: for data compression
-  * ``ORC``: for Apache ORC format support
-  * ``re2``: for compute kernels and Gandiva, a dependency of gRPC
-  * ``Protobuf``: Google Protocol Buffers, for data serialization
-  * ``RapidJSON``: for data serialization
-  * ``Snappy``: for data compression
-  * ``Thrift``: Apache Thrift, for data serialization
-  * ``utf8proc``: for compute kernels
-  * ``ZLIB``: for data compression
-  * ``zstd``: for data compression
-
-The CMake option ``ARROW_DEPENDENCY_SOURCE`` is a global option that instructs
-the build system how to resolve each dependency. There are a few options:
-
-* ``AUTO``: Try to find the package in the system default locations and build
-  from source if not found
-* ``BUNDLED``: Build the dependency automatically from source
-* ``SYSTEM``: Find the dependency in system paths using CMake's built-in
-  ``find_package`` function, or using ``pkg-config`` for packages that do not
-  have this feature
-* ``CONDA``: Use ``$CONDA_PREFIX`` as an alternative ``SYSTEM`` path
-* ``VCPKG``: Find dependencies installed by vcpkg, and if not found, run
-  ``vcpkg install`` to install them
-* ``BREW``: Use Homebrew default paths as an alternative ``SYSTEM`` path
-
-The default method is ``AUTO`` unless you are developing within an active conda
-environment (detected by presence of the ``$CONDA_PREFIX`` environment
-variable), in which case it is ``CONDA``.
-
-Individual Dependency Resolution
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-While ``-DARROW_DEPENDENCY_SOURCE=$SOURCE`` sets a global default for all
-packages, the resolution strategy can be overridden for individual packages by
-setting ``-D$PACKAGE_NAME_SOURCE=..``. For example, to build Protocol Buffers
-from source, set
-
-.. code-block:: shell
-
-   -DProtobuf_SOURCE=BUNDLED
-
-This variable is unfortunately case-sensitive; the name used for each package
-is listed above, but the most up-to-date listing can be found in
-`cpp/cmake_modules/ThirdpartyToolchain.cmake <https://github.com/apache/arrow/blob/master/cpp/cmake_modules/ThirdpartyToolchain.cmake>`_.
-
-Bundled Dependency Versions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When using the ``BUNDLED`` method to build a dependency from source, the
-version number from ``cpp/thirdparty/versions.txt`` is used. There is also a
-dependency source downloader script (see below), which can be used to set up
-offline builds.
-
-When using ``BUNDLED`` for dependency resolution (and if you use either the
-jemalloc or mimalloc allocators, which are recommended), statically linking the
-Arrow libraries in a third party project is more complex. See below for
-instructions about how to configure your build system in this case.
-
-Boost-related Options
-~~~~~~~~~~~~~~~~~~~~~
-
-We depend on some Boost C++ libraries for cross-platform support. In most cases,
-the Boost version available in your package manager may be new enough, and the
-build system will find it automatically. If you have Boost installed in a
-non-standard location, you can specify it by passing
-``-DBOOST_ROOT=$MY_BOOST_ROOT`` or setting the ``BOOST_ROOT`` environment
-variable.
-
-Offline Builds
-~~~~~~~~~~~~~~
-
-If you do not use the above variables to direct the Arrow build system to
-preinstalled dependencies, they will be built automatically by the Arrow build
-system. The source archive for each dependency will be downloaded from the
-internet, which can cause issues in environments with limited connectivity.
-
-To enable offline builds, you can download the source artifacts yourself and
-use environment variables of the form ``ARROW_$LIBRARY_URL`` to direct the
-build system to read from a local file rather than accessing the internet.
-
-To make this easier for you, we have prepared a script
-``thirdparty/download_dependencies.sh`` which will download the correct version
-of each dependency to a directory of your choosing. It will print a list of
-bash-style environment variable statements at the end to use for your build
-script.
-
-.. code-block:: shell
-
-   # Download tarballs into $HOME/arrow-thirdparty
-   $ ./thirdparty/download_dependencies.sh $HOME/arrow-thirdparty
-
-You can then invoke CMake to create the build directory, and it will use the
-declared environment variables pointing to the downloaded archives instead of
-downloading them again (otherwise this would happen once per build directory!).
-
-Statically Linking
-~~~~~~~~~~~~~~~~~~
-
-When ``-DARROW_BUILD_STATIC=ON``, all build dependencies built as static
-libraries by the Arrow build system will be merged together to create a static
-library ``arrow_bundled_dependencies``. In UNIX-like environments (Linux, macOS,
-MinGW), this is called ``libarrow_bundled_dependencies.a`` and on Windows with
-Visual Studio ``arrow_bundled_dependencies.lib``. This "dependency bundle"
-library is installed in the same place as the other Arrow static libraries.
-
-If you are using CMake, the bundled dependencies will automatically be included
-when linking if you use the ``arrow_static`` CMake target. In other build
-systems, you may need to explicitly link to the dependency bundle. We created
-an `example CMake-based build configuration
-<https://github.com/apache/arrow/tree/master/cpp/examples/minimal_build>`_ to
-show you a working example.
-
-On Linux and macOS, if your application does not link to the ``pthread``
-library already, you must include ``-pthread`` in your linker setup. In CMake
-this can be accomplished with the ``Threads`` built-in package:
-
-.. code-block:: cmake
-
-   set(THREADS_PREFER_PTHREAD_FLAG ON)
-   find_package(Threads REQUIRED)
-   target_link_libraries(my_target PRIVATE Threads::Threads)
-
-Extra debugging help
-~~~~~~~~~~~~~~~~~~~~
-
-If you use the CMake option ``-DARROW_EXTRA_ERROR_CONTEXT=ON`` it will compile
-the libraries with extra debugging information on error checks inside the
-``RETURN_NOT_OK`` macro. In unit tests with ``ASSERT_OK``, this will yield error
-outputs like:
-
-.. code-block:: shell
-
-   ../src/arrow/ipc/ipc-read-write-test.cc:609: Failure
-   Failed
-   ../src/arrow/ipc/metadata-internal.cc:508 code: TypeToFlatbuffer(fbb, *field.type(), &children, &layout, &type_enum, dictionary_memo, &type_offset)
-   ../src/arrow/ipc/metadata-internal.cc:598 code: FieldToFlatbuffer(fbb, *schema.field(i), dictionary_memo, &offset)
-   ../src/arrow/ipc/metadata-internal.cc:651 code: SchemaToFlatbuffer(fbb, schema, dictionary_memo, &fb_schema)
-   ../src/arrow/ipc/writer.cc:697 code: WriteSchemaMessage(schema_, dictionary_memo_, &schema_fb)
-   ../src/arrow/ipc/writer.cc:730 code: WriteSchema()
-   ../src/arrow/ipc/writer.cc:755 code: schema_writer.Write(&dictionaries_)
-   ../src/arrow/ipc/writer.cc:778 code: CheckStarted()
-   ../src/arrow/ipc/ipc-read-write-test.cc:574 code: writer->WriteRecordBatch(batch)
-   NotImplemented: Unable to convert type: decimal(19, 4)
-
-Deprecations and API Changes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-We use the compiler definition ``ARROW_NO_DEPRECATED_API`` to disable APIs that
-have been deprecated. It is a good practice to compile third party applications
-with this flag to proactively catch and account for API changes.
-
-Modular Build Targets
-~~~~~~~~~~~~~~~~~~~~~
-
-Since there are several major parts of the C++ project, we have provided
-modular CMake targets for building each library component, group of unit tests
-and benchmarks, and their dependencies:
-
-* ``make arrow`` for Arrow core libraries
-* ``make parquet`` for Parquet libraries
-* ``make gandiva`` for Gandiva (LLVM expression compiler) libraries
-* ``make plasma`` for Plasma libraries, server
-
-.. note::
-   If you have selected Ninja as CMake generator, replace ``make arrow`` with
-   ``ninja arrow``, and so on.
-
-To build the unit tests or benchmarks, add ``-tests`` or ``-benchmarks``
-to the target name. So ``make arrow-tests`` will build the Arrow core unit
-tests. Using the ``-all`` target, e.g. ``parquet-all``, will build everything.
-
-If you wish to only build and install one or more project subcomponents, we
-have provided the CMake option ``ARROW_OPTIONAL_INSTALL`` to only install
-targets that have been built. For example, if you only wish to build the
-Parquet libraries, its tests, and its dependencies, you can run:
-
-.. code-block:: shell
-
-   cmake .. -DARROW_PARQUET=ON \
-         -DARROW_OPTIONAL_INSTALL=ON \
-         -DARROW_BUILD_TESTS=ON
-   make parquet
-   make install
-
-If you omit an explicit target when invoking ``make``, all targets will be
-built.
-
-Debugging with Xcode on macOS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Xcode is the IDE provided with macOS and can be used to develop and debug
-Arrow by generating an Xcode project:
-
-.. code-block:: shell
-
-   cd cpp
-   mkdir xcode-build
-   cd xcode-build
-   cmake .. -G Xcode -DARROW_BUILD_TESTS=ON -DCMAKE_BUILD_TYPE=DEBUG
-   open arrow.xcodeproj
-
-This will generate a project and open it in the Xcode app. As an alternative,
-the command ``xcodebuild`` will perform a command-line build using the
-generated project. It is recommended to use the "Automatically Create Schemes"
-option when first launching the project.  Selecting an auto-generated scheme
-will allow you to build and run a unit test with breakpoints enabled.
diff --git a/docs/source/developers/cpp/conventions.rst b/docs/source/developers/cpp/conventions.rst
deleted file mode 100644
index 9db15fb..0000000
--- a/docs/source/developers/cpp/conventions.rst
+++ /dev/null
@@ -1,90 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. highlight:: cpp
-
-===========
-Conventions
-===========
-
-This section provides some information about some of the abstractions and
-development approaches we use to solve problems common to many parts of the C++
-project.
-
-File Naming
-===========
-
-C++ source and header files should use underscores for word separation, not hyphens.
-Compiled executables, however, will automatically use hyphens (such that
-e.g. ``src/arrow/scalar_test.cc`` will be compiled into ``arrow-scalar-test``).
-
-C++ header files use the ``.h`` extension. Any header file name not
-containing ``internal`` is considered to be a public header, and will be
-automatically installed by the build.
-
-Comments and Docstrings
-=======================
-
-Regular comments start with ``//``.
-
-Doxygen docstrings start with ``///``, and Doxygen directives start with ``\``,
-like this::
-
-   /// \brief Allocate a fixed size mutable buffer from a memory pool, zero its padding.
-   ///
-   /// \param[in] size size of buffer to allocate
-   /// \param[in] pool a memory pool
-   ARROW_EXPORT
-   Result<std::unique_ptr<Buffer>> AllocateBuffer(const int64_t size,
-                                                  MemoryPool* pool = NULLPTR);
-
-The summary line of a docstring uses the infinitive, not the indicative
-(for example, "Allocate a buffer" rather than "Allocates a buffer").
-
-Memory Pools
-============
-
-We provide a default memory pool with ``arrow::default_memory_pool()``.
-
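-For illustration, here is a minimal sketch (the surrounding function name is
-hypothetical) of allocating a buffer from the default pool, using the
-``AllocateBuffer`` function shown above:
-
-.. code-block:: cpp
-
-   #include <memory>
-
-   #include "arrow/buffer.h"
-   #include "arrow/memory_pool.h"
-   #include "arrow/result.h"
-   #include "arrow/status.h"
-
-   arrow::Status AllocateScratchSpace() {
-     // Allocate 4096 zero-padded bytes from the process-wide default pool.
-     ARROW_ASSIGN_OR_RAISE(
-         std::unique_ptr<arrow::Buffer> buffer,
-         arrow::AllocateBuffer(4096, arrow::default_memory_pool()));
-     // ... use *buffer ...
-     return arrow::Status::OK();
-   }
-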
-Error Handling and Exceptions
-=============================
-
-For error handling, we return ``arrow::Status`` values instead of throwing C++
-exceptions. Since the Arrow C++ libraries are intended to be useful as a
-component in larger C++ projects, using ``Status`` objects can help with good
-code hygiene by making explicit when a function is expected to be able to fail.
-
-A more recent option is to return an ``arrow::Result<T>`` object that can
-represent either a successful result with a ``T`` value, or an error result
-with a ``Status`` value.
-
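-As a sketch (all function names here are hypothetical), a fallible function
-returns ``Status`` or ``Result<T>``, and callers propagate errors with the
-``ARROW_RETURN_NOT_OK`` and ``ARROW_ASSIGN_OR_RAISE`` convenience macros:
-
-.. code-block:: cpp
-
-   #include <cstdint>
-
-   #include "arrow/result.h"
-   #include "arrow/status.h"
-
-   arrow::Status Validate(int64_t size) {
-     if (size < 0) {
-       return arrow::Status::Invalid("negative size: ", size);
-     }
-     return arrow::Status::OK();
-   }
-
-   arrow::Result<int64_t> DoubledSize(int64_t size) {
-     ARROW_RETURN_NOT_OK(Validate(size));  // propagate the error to the caller
-     return size * 2;
-   }
-
-   arrow::Status Caller() {
-     ARROW_ASSIGN_OR_RAISE(int64_t doubled, DoubledSize(21));
-     // doubled == 42 here
-     return arrow::Status::OK();
-   }
-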
-For expressing internal invariants and "cannot fail" errors, we use ``DCHECK`` macros
-defined in ``arrow/util/logging.h``. These checks are disabled in release builds
-and are intended to catch internal development errors, particularly when
-refactoring. These macros are not to be included in any public header files.
-
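-For example (a hypothetical internal helper):
-
-.. code-block:: cpp
-
-   #include <cstdint>
-
-   #include "arrow/util/logging.h"
-
-   // Callers are required to pass non-negative arguments, so we assert the
-   // invariant with DCHECK rather than returning a Status.
-   int64_t SlotOffset(int64_t i, int64_t slot_width) {
-     DCHECK_GE(i, 0);          // compiled out in release builds
-     DCHECK_GT(slot_width, 0);
-     return i * slot_width;
-   }
-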
-Since we do not use exceptions, we avoid doing expensive work in object
-constructors. Objects that are expensive to construct may often have private
-constructors, with public static factory methods that return ``Status`` or
-``Result<T>``.
-
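-A minimal sketch of that pattern (the class is hypothetical):
-
-.. code-block:: cpp
-
-   #include <cstdint>
-   #include <memory>
-
-   #include "arrow/result.h"
-   #include "arrow/status.h"
-
-   class ExpensiveIndex {
-    public:
-     // Public factory: performs the fallible, potentially expensive setup.
-     static arrow::Result<std::shared_ptr<ExpensiveIndex>> Make(int64_t capacity) {
-       if (capacity <= 0) {
-         return arrow::Status::Invalid("capacity must be positive");
-       }
-       // ... build the expensive internal state ...
-       return std::shared_ptr<ExpensiveIndex>(new ExpensiveIndex());
-     }
-
-    private:
-     ExpensiveIndex() = default;  // private, cheap, cannot fail
-   };
-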
-There are a number of object constructors, like those of ``arrow::Schema`` and
-``arrow::RecordBatch``, where larger STL container objects like ``std::vector``
-may be created. While it is possible for ``std::bad_alloc`` to be thrown in
-these constructors, the circumstances in which it would be are somewhat
-esoteric, and it is likely that an application would have encountered other,
-more serious problems before having ``std::bad_alloc`` thrown in a constructor.
diff --git a/docs/source/developers/cpp/development.rst b/docs/source/developers/cpp/development.rst
deleted file mode 100644
index c0f5a0f..0000000
--- a/docs/source/developers/cpp/development.rst
+++ /dev/null
@@ -1,293 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-======================
-Development Guidelines
-======================
-
-This section provides information for developers who wish to contribute to the
-C++ codebase.
-
-.. note::
-
-   Since most of the project's developers work on Linux or macOS, not all
-   features or developer tools are uniformly supported on Windows. If you are
-   on Windows, have a look at :ref:`developers-cpp-windows`.
-
-Compiler warning levels
-=======================
-
-The ``BUILD_WARNING_LEVEL`` CMake option switches between sets of predetermined
-compiler warning levels that we use for code tidiness. For release builds, the
-default warning level is ``PRODUCTION``, while for debug builds the default is
-``CHECKIN``.
-
-When using ``CHECKIN`` for debug builds, ``-Werror`` is added for gcc and
-clang, causing the build to fail on any warning; with MSVC, ``/WX`` is set to
-the same effect.
-
-Running unit tests
-==================
-
-The ``-DARROW_BUILD_TESTS=ON`` CMake option enables building of unit test
-executables.  You can then either run them individually, by launching the
-desired executable, or run them all at once by launching the ``ctest``
-executable (which is part of the CMake suite).
-
-A possible invocation is something like::
-
-   $ ctest -j16 --output-on-failure
-
-where the ``-j16`` option runs up to 16 tests in parallel, taking advantage
-of multiple CPU cores and hardware threads.
-
-Running benchmarks
-==================
-
-The ``-DARROW_BUILD_BENCHMARKS=ON`` CMake option enables building of benchmark
-executables.  You can then run benchmarks individually by launching the
-corresponding executable from the command line, e.g.::
-
-   $ ./build/release/arrow-builder-benchmark
-
-.. note::
-   For meaningful benchmark numbers, it is very strongly recommended to build
-   in ``Release`` mode, so as to enable compiler optimizations.
-
-Code Style, Linting, and CI
-===========================
-
-This project follows `Google's C++ Style Guide
-<https://google.github.io/styleguide/cppguide.html>`_ with minor exceptions:
-
-* We relax the line length restriction to 90 characters.
-* We use the ``NULLPTR`` macro in header files (instead of ``nullptr``) defined
-  in ``src/arrow/util/macros.h`` to support building C++/CLI (ARROW-1134)
-* We relax the guide's rules regarding structs.  For public headers we should
-  use struct only for objects that are principally simple data containers, where
-  it is OK to expose all the internal members and any methods are primarily
-  conveniences (see the sketch after this list).  For private headers the rules
-  are relaxed further, and structs can be used where convenient for types that
-  do not need access control, even though they may not be simple data containers.
-
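-A sketch of a struct that satisfies the public-header rule above (the type
-itself is hypothetical):
-
-.. code-block:: cpp
-
-   #include <cstdint>
-
-   // OK in a public header: a simple data container whose members may all be
-   // exposed, with methods that are only conveniences.
-   struct ByteRange {
-     int64_t offset = 0;
-     int64_t length = 0;
-
-     int64_t end() const { return offset + length; }
-   };
-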
-Our continuous integration builds on GitHub Actions run the unit test
-suites on a variety of platforms and configurations, including using
-Address Sanitizer and Undefined Behavior Sanitizer to check for various
-patterns of misbehaviour such as memory leaks. In addition, the
-codebase is subjected to a number of code style and code cleanliness checks.
-
-In order to have a passing CI build, your modified git branch must pass the
-following checks:
-
-* C++ builds with the project's active version of ``clang`` without
-  compiler warnings when ``-DBUILD_WARNING_LEVEL=CHECKIN`` is set. Note that
-  there are classes of warnings (such as ``-Wdocumentation``, see more
-  on this below) that are not caught by ``gcc``.
-* Passes various C++ (and others) style checks, checked with the ``lint``
-  subcommand to :ref:`Archery <archery>`.
-* CMake files pass style checks, which can be fixed by running
-  ``run-cmake-format.py`` from the root of the repository. This requires Python
-  3 and `cmake_format <https://github.com/cheshirekow/cmake_format>`_ (note:
-  this currently does not work on Windows).
-
-In order to account for variations in the behavior of ``clang-format`` between
-major versions of LLVM, we pin the version of ``clang-format`` used (currently
-LLVM 8).
-
-Depending on how you installed clang-format, the build system may not be able
-to find it. You can provide an explicit path to your LLVM installation (or the
-root path for the clang tools) with the environment variable
-``$CLANG_TOOLS_PATH`` or by passing ``-DClangTools_PATH=$PATH_TO_CLANG_TOOLS`` when
-invoking CMake.
-
-To make linting more reproducible for everyone, we provide a ``docker-compose``
-target that is executable from the root of the repository:
-
-.. code-block:: shell
-
-   docker-compose run ubuntu-lint
-
-Cleaning includes with include-what-you-use (IWYU)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-We occasionally use Google's `include-what-you-use
-<https://github.com/include-what-you-use/include-what-you-use>`_ tool, also
-known as IWYU, to remove unnecessary ``#include`` directives.
-
-To begin using IWYU, you must first build it by following the instructions in
-the project's documentation. Once the ``include-what-you-use`` executable is in
-your ``$PATH``, you must run CMake with ``-DCMAKE_EXPORT_COMPILE_COMMANDS=ON``
-in a new out-of-source CMake build directory like so:
-
-.. code-block:: shell
-
-   mkdir -p $ARROW_ROOT/cpp/iwyu
-   cd $ARROW_ROOT/cpp/iwyu
-   cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-     -DARROW_PYTHON=ON \
-     -DARROW_PARQUET=ON \
-     -DARROW_FLIGHT=ON \
-     -DARROW_PLASMA=ON \
-     -DARROW_GANDIVA=ON \
-     -DARROW_BUILD_BENCHMARKS=ON \
-     -DARROW_BUILD_BENCHMARKS_REFERENCE=ON \
-     -DARROW_BUILD_TESTS=ON \
-     -DARROW_BUILD_UTILITIES=ON \
-     -DARROW_S3=ON \
-     -DARROW_WITH_BROTLI=ON \
-     -DARROW_WITH_BZ2=ON \
-     -DARROW_WITH_LZ4=ON \
-     -DARROW_WITH_SNAPPY=ON \
-     -DARROW_WITH_ZLIB=ON \
-     -DARROW_WITH_ZSTD=ON ..
-
-In order for IWYU to run on the desired component in the codebase, it must be
-enabled by the CMake configuration flags. Once this is done, you can run IWYU
-on the whole codebase by running a helper ``iwyu.sh`` script:
-
-.. code-block:: shell
-
-   IWYU_SH=$ARROW_ROOT/cpp/build-support/iwyu/iwyu.sh
-   $IWYU_SH
-
-Since this is very time-consuming, you can check a subset of files matching
-some string pattern with the special "match" option:
-
-.. code-block:: shell
-
-   $IWYU_SH match $PATTERN
-
-For example, if you wanted to do IWYU checks on all files in
-``src/arrow/array``, you could run
-
-.. code-block:: shell
-
-   $IWYU_SH match arrow/array
-
-Checking for ABI and API stability
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To build ABI compliance reports, you need to install the two tools
-``abi-dumper`` and ``abi-compliance-checker``.
-
-Build Arrow C++ in Debug mode; alternatively, you can use ``-Og``, which also
-builds with the necessary symbols but includes a bit of code optimization.
-Once the build has finished, you can generate ABI reports using:
-
-.. code-block:: shell
-
-   abi-dumper -lver 9 debug/libarrow.so -o ABI-9.dump
-
-The above version number is freely selectable. As we want to compare versions,
-you should now ``git checkout`` the version you want to compare it to and re-run
-the above command using a different version number. Once both reports are
-generated, you can build a comparison report using:
-
-.. code-block:: shell
-
-   abi-compliance-checker -l libarrow -d1 ABI-9.dump -d2 ABI-10.dump
-
-The report is then generated in ``compat_reports/libarrow`` as an HTML report.
-
-API Documentation
-=================
-
-We use Doxygen style comments (``///``) in header files for comments
-that we wish to show up in API documentation for classes and
-functions.
-
-When using ``clang`` and building with
-``-DBUILD_WARNING_LEVEL=CHECKIN``, the ``-Wdocumentation`` flag is
-used which checks for some common documentation inconsistencies, like
-documenting some, but not all function parameters with ``\param``. See
-the `LLVM documentation warnings section
-<https://releases.llvm.org/7.0.1/tools/clang/docs/DiagnosticsReference.html#wdocumentation>`_
-for more about this.
-
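-For instance, with ``-Wdocumentation`` enabled, the following sketch (the
-function is hypothetical) would produce a warning if the ``dest`` parameter
-were left undocumented; documenting every parameter keeps the build clean:
-
-.. code-block:: cpp
-
-   /// \brief Copy a range of bytes from one buffer to another.
-   ///
-   /// \param[in] src buffer to copy from
-   /// \param[in] length number of bytes to copy
-   /// \param[out] dest buffer to copy into
-   arrow::Status CopyBytes(const arrow::Buffer& src, int64_t length,
-                           arrow::Buffer* dest);
-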
-While we publish the API documentation as part of the main Sphinx-based
-documentation site, you can also build the C++ API documentation anytime using
-Doxygen. Run the following command from the ``cpp/apidoc`` directory:
-
-.. code-block:: shell
-
-   doxygen Doxyfile
-
-This requires `Doxygen <https://www.doxygen.org>`_ to be installed.
-
-Apache Parquet Development
-==========================
-
-To build the C++ libraries for Apache Parquet, add the flag
-``-DARROW_PARQUET=ON`` when invoking CMake.
-To build Apache Parquet with encryption support, add the flag
-``-DPARQUET_REQUIRE_ENCRYPTION=ON`` when invoking CMake. The Parquet libraries and unit tests
-can be built with the ``parquet`` make target:
-
-.. code-block:: shell
-
-   make parquet
-
-On Linux and macOS, if you do not have Apache Thrift installed on your system,
-or you are building with ``-DThrift_SOURCE=BUNDLED``, you must install
-``bison`` and ``flex`` packages. On Windows we handle these build dependencies
-automatically when building Thrift from source.
-
-Running ``ctest -L unittest`` will run all built C++ unit tests, while
-``ctest -L parquet`` will run only the Parquet unit tests. The unit tests rely
-on the environment variable ``PARQUET_TEST_DATA``, which points to data in a
-git submodule of the repository https://github.com/apache/parquet-testing:
-
-.. code-block:: shell
-
-   git submodule update --init
-   export PARQUET_TEST_DATA=$ARROW_ROOT/cpp/submodules/parquet-testing/data
-
-Here ``$ARROW_ROOT`` is the absolute path to the Arrow codebase.
-
-Arrow Flight RPC
-================
-
-In addition to the Arrow dependencies, Flight requires:
-
-* gRPC (>= 1.14, roughly)
-* Protobuf (>= 3.6, earlier versions may work)
-* c-ares (used by gRPC)
-
-By default, Arrow will try to download and build these dependencies
-when building Flight.
-
-The optional ``flight`` libraries and tests can be built by passing
-``-DARROW_FLIGHT=ON``.
-
-.. code-block:: shell
-
-   cmake .. -DARROW_FLIGHT=ON -DARROW_BUILD_TESTS=ON
-   make
-
-You can also use existing installations of the extra dependencies.
-When building, set the environment variables ``gRPC_ROOT`` and/or
-``Protobuf_ROOT`` and/or ``c-ares_ROOT``.
-
-We are developing against recent versions of gRPC. The
-``grpc-cpp`` package available from https://conda-forge.org/ is one reliable
-way to obtain gRPC in a cross-platform way. You may try using system libraries
-for gRPC and Protobuf, but these are likely to be too old. On macOS, you can
-try `Homebrew <https://brew.sh/>`_:
-
-.. code-block:: shell
-
-   brew install grpc
diff --git a/docs/source/developers/cpp/fuzzing.rst b/docs/source/developers/cpp/fuzzing.rst
deleted file mode 100644
index 41398a1..0000000
--- a/docs/source/developers/cpp/fuzzing.rst
+++ /dev/null
@@ -1,99 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-=================
-Fuzzing Arrow C++
-=================
-
-To make the handling of invalid input more robust, we have enabled
-fuzz testing on several parts of the Arrow C++ feature set, currently:
-
-* the IPC stream format
-* the IPC file format
-* the Parquet file format
-
-We welcome any contribution to expand the scope of fuzz testing and to cover
-additional areas that ingest potentially invalid or malicious data.
-
-Fuzz Targets and Utilities
-==========================
-
-By passing the ``-DARROW_FUZZING=ON`` CMake option, you will build
-the fuzz targets corresponding to the aforementioned Arrow features, as well
-as additional related utilities.
-
-Generating the seed corpus
---------------------------
-
-Fuzzing essentially explores the domain space by randomly mutating previously
-tested inputs, without having any high-level understanding of the area being
-fuzz-tested.  However, the domain space is so huge that this strategy alone
-may fail to actually produce any "interesting" inputs.
-
-To guide the process, it is therefore important to provide a *seed corpus*
-of valid (or invalid, but remarkable) inputs from which the fuzzing
-infrastructure can derive new inputs for testing.  A script is provided
-to automate that task.  Assuming the fuzzing executables can be found in
-``build/debug``, the seed corpus can be generated as follows:
-
-.. code-block:: shell
-
-   $ ./build-support/fuzzing/generate_corpuses.sh build/debug
-
-Continuous fuzzing infrastructure
-=================================
-
-The process of fuzz testing is computationally intensive and therefore
-benefits from dedicated computing facilities.  Arrow C++ is exercised by
-the `OSS-Fuzz`_ continuous fuzzing infrastructure operated by Google.
-
-Issues found by OSS-Fuzz are reported to, and accessible by, a limited set of
-`core developers <https://github.com/google/oss-fuzz/blob/master/projects/arrow/project.yaml>`_.
-If you are an Arrow core developer and want to be added to that list, you can
-ask on the :ref:`mailing-list <contributing>`.
-
-.. _OSS-Fuzz: https://google.github.io/oss-fuzz/
-
-Reproducing locally
-===================
-
-When a crash is found by fuzzing, it is often useful to download the data
-used to produce the crash, and use it to reproduce the crash so as to debug
-and investigate.
-
-Assuming you are in a subdirectory inside ``cpp``, the following command
-would allow you to build the fuzz targets with debug information and the
-various sanitizer checks enabled.
-
-.. code-block:: shell
-
-   $ cmake .. -GNinja \
-       -DCMAKE_BUILD_TYPE=Debug \
-       -DARROW_USE_ASAN=on \
-       -DARROW_USE_UBSAN=on \
-       -DARROW_FUZZING=on
-
-Then, assuming you have downloaded the crashing data file (let's call it
-``testcase-arrow-ipc-file-fuzz-123465``), you can reproduce the crash
-by running the affected fuzz target on that file:
-
-.. code-block:: shell
-
-   $ build/debug/arrow-ipc-file-fuzz testcase-arrow-ipc-file-fuzz-123465
-
-(you may want to run that command under a debugger so as to inspect the
-program state more closely)
diff --git a/docs/source/developers/cpp/index.rst b/docs/source/developers/cpp/index.rst
deleted file mode 100644
index 36c9778..0000000
--- a/docs/source/developers/cpp/index.rst
+++ /dev/null
@@ -1,31 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _cpp-development:
-
-***************
-C++ Development
-***************
-
-.. toctree::
-   :maxdepth: 2
-
-   building
-   development
-   windows
-   conventions
-   fuzzing
diff --git a/docs/source/developers/cpp/windows.rst b/docs/source/developers/cpp/windows.rst
deleted file mode 100644
index 4de67ba..0000000
--- a/docs/source/developers/cpp/windows.rst
+++ /dev/null
@@ -1,416 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _developers-cpp-windows:
-
-=====================
-Developing on Windows
-=====================
-
-As on Linux and macOS, we have worked to enable builds to work "out of the box"
-with CMake for a reasonably large subset of the project.
-
-.. _windows-system-setup:
-
-System Setup
-============
-
-Microsoft provides the free Visual Studio Community edition. When doing
-development in the shell, you must initialize the development environment
-each time you open the shell.
-
-For Visual Studio 2015, execute the following batch script:
-
-.. code-block:: shell
-
-   "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
-
-For Visual Studio 2017, the script is:
-
-.. code-block:: shell
-
-   "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=amd64
-
-For Visual Studio 2019, the script is:
-
-.. code-block:: shell
-
-  "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\Common7\Tools\VsDevCmd.bat" -arch=amd64
-
-One can configure a console emulator like `cmder <https://cmder.net/>`_ to
-automatically launch this when starting a new development console.
-
-Using conda-forge for build dependencies
-========================================
-
-`Miniconda <https://conda.io/miniconda.html>`_ is a minimal Python distribution
-including the `conda <https://conda.io>`_ package manager. Some members of the
-Apache Arrow community participate in the maintenance of `conda-forge
-<https://conda-forge.org/>`_, a community-maintained cross-platform package
-repository for conda.
-
-To use ``conda-forge`` for your C++ build dependencies on Windows, first
-download and install a 64-bit distribution from the `Miniconda homepage
-<https://conda.io/miniconda.html>`_.
-
-To configure ``conda`` to use the ``conda-forge`` channel by default, launch a
-command prompt (``cmd.exe``), run the initialization command shown
-:ref:`above<windows-system-setup>` (``vcvarsall.bat`` or ``VsDevCmd.bat``), then
-run the command:
-
-.. code-block:: shell
-
-   conda config --add channels conda-forge
-
-Now, you can bootstrap a build environment (call from the root directory of the
-Arrow codebase):
-
-.. code-block:: shell
-
-   conda create -y -n arrow-dev --file=ci\conda_env_cpp.yml
-
-Then "activate" this conda environment with:
-
-.. code-block:: shell
-
-   activate arrow-dev
-
-If the environment has been activated, the Arrow build system will
-automatically see the ``%CONDA_PREFIX%`` environment variable and use that for
-resolving the build dependencies. This is equivalent to setting
-
-.. code-block:: shell
-
-   -DARROW_DEPENDENCY_SOURCE=SYSTEM ^
-   -DARROW_PACKAGE_PREFIX=%CONDA_PREFIX%\Library
-
-To use the Visual Studio IDE with this conda environment activated, launch it by
-running the command ``devenv`` from the same command prompt.
-
-Note that dependencies installed as conda packages are built in release mode and
-cannot link with debug builds. If you intend to use ``-DCMAKE_BUILD_TYPE=debug``
-then you must build the packages from source.
-``-DCMAKE_BUILD_TYPE=relwithdebinfo`` is also available, which produces a build
-that can both be linked with release libraries and be debugged.
-
-.. note::
-
-   If you run into any problems using conda packages for dependencies, a very
-   common problem is mixing packages from the ``defaults`` channel with those
-   from ``conda-forge``. You can examine the installed packages in your
-   environment (and their origin) with ``conda list``.
-
-Using vcpkg for build dependencies
-========================================
-
-`vcpkg <https://github.com/microsoft/vcpkg>`_ is an open source package manager
-from Microsoft. It hosts community-contributed ports of C and C++ packages and
-their dependencies. Arrow includes a manifest file `cpp/vcpkg.json
-<https://github.com/apache/arrow/blob/master/cpp/vcpkg.json>`_ that specifies
-which vcpkg packages are required to build the C++ library.
-
-To use vcpkg for C++ build dependencies on Windows, first
-`install <https://docs.microsoft.com/en-us/cpp/build/install-vcpkg>`_ and
-`integrate <https://docs.microsoft.com/en-us/cpp/build/integrate-vcpkg>`_
-vcpkg. Then change working directory in ``cmd.exe`` to the root directory
-of Arrow and run the command:
-
-.. code-block:: shell
-
-   vcpkg install ^
-     --triplet x64-windows ^
-     --x-manifest-root cpp  ^
-     --clean-after-build
-
-On Windows, vcpkg builds dynamic link libraries by default. Use the triplet
-``x64-windows-static`` to build static libraries. vcpkg downloads source 
-packages and compiles them locally, so installing dependencies with vcpkg is
-more time-consuming than with conda.
-
-Then in your ``cmake`` command, to use dependencies installed by vcpkg, set:
-
-.. code-block:: shell
-
-   -DARROW_DEPENDENCY_SOURCE=VCPKG
-
-You can optionally set other variables to override the default CMake
-configurations for vcpkg, including:
-   
-* ``-DCMAKE_TOOLCHAIN_FILE``: by default, the CMake scripts automatically find
-  the location of the vcpkg CMake toolchain file ``vcpkg.cmake``; use this to
-  instead specify its location
-* ``-DVCPKG_TARGET_TRIPLET``: by default, the CMake scripts attempt to infer the
-  vcpkg 
-  `triplet <https://github.com/microsoft/vcpkg/blob/master/docs/users/triplets.md>`_;
-  use this to instead specify the triplet
-* ``-DARROW_DEPENDENCY_USE_SHARED``: default is ``ON``; set to ``OFF`` for
-  static libraries
-* ``-DVCPKG_MANIFEST_MODE``: default is ``ON``; set to ``OFF`` to ignore the
-  ``vcpkg.json`` manifest file and only look for vcpkg packages that are
-  already installed under the directory where vcpkg is installed
-
-
-Building using Visual Studio (MSVC) Solution Files
-==================================================
-
-Change working directory in ``cmd.exe`` to the root directory of Arrow and do
-an out of source build by generating an MSVC solution:
-
-.. code-block:: shell
-
-   cd cpp
-   mkdir build
-   cd build
-   cmake .. -G "Visual Studio 14 2015" -A x64 ^
-         -DARROW_BUILD_TESTS=ON
-   cmake --build . --config Release
-
-For newer versions of Visual Studio, specify the generator
-``Visual Studio 15 2017`` or ``Visual Studio 16 2019``.
-
-Building with Ninja and clcache
-===============================
-
-The `Ninja <https://ninja-build.org/>`_ build system offers better build
-parallelization, and the optional `clcache
-<https://github.com/frerich/clcache/>`_ compiler cache keeps track of
-past compilations to avoid running them over and over again (in a way similar
-to the Unix-specific ``ccache``).
-
-Newer versions of Visual Studio include Ninja. To see if your Visual Studio
-includes Ninja, run the initialization command shown
-:ref:`above<windows-system-setup>` (``vcvarsall.bat`` or ``VsDevCmd.bat``), then
-run ``ninja --version``.
-
-If Ninja is not included in your version of Visual Studio, and you are using
-conda, activate your conda environment and install Ninja and clcache:
-
-.. code-block:: shell
-
-   activate arrow-dev
-   conda install -c conda-forge ninja
-   pip install git+https://github.com/frerich/clcache.git
-
-If you are not using conda,
-`install Ninja from another source <https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages>`_
-and optionally
-`install clcache from another source <https://github.com/frerich/clcache/wiki/Installation>`_.
-
-After installation is complete, change working directory in ``cmd.exe`` to the root directory of Arrow and
-do an out of source build by generating Ninja files:
-
-.. code-block:: shell
-
-   cd cpp
-   mkdir build
-   cd build
-   cmake -G "Ninja" ^
-         -DCMAKE_C_COMPILER=clcache ^
-         -DCMAKE_CXX_COMPILER=clcache ^
-         -DARROW_BUILD_TESTS=ON ^
-         -DGTest_SOURCE=BUNDLED ..
-   cmake --build . --config Release
-
-Setting ``CMAKE_C_COMPILER`` and ``CMAKE_CXX_COMPILER`` in the command line
-of ``cmake`` is the preferred method of using ``clcache``. Alternatively, you
-can set ``CC`` and ``CXX`` environment variables before calling ``cmake``:
-
-.. code-block:: shell
-
-   ...
-   set CC=clcache
-   set CXX=clcache
-   cmake -G "Ninja" ^
-   ...
-
-
-
-Building with NMake
-===================
-
-Change working directory in ``cmd.exe`` to the root directory of Arrow and
-do an out of source build using ``nmake``:
-
-.. code-block:: shell
-
-   cd cpp
-   mkdir build
-   cd build
-   cmake -G "NMake Makefiles" ..
-   nmake
-
-Building on MSYS2
-=================
-
-You can build in an MSYS2 terminal, in ``cmd.exe`` or in a PowerShell terminal.
-
-In an MSYS2 terminal:
-
-.. code-block:: shell
-
-   cd cpp
-   mkdir build
-   cd build
-   cmake -G "MSYS Makefiles" ..
-   make
-
-In ``cmd.exe`` or a PowerShell terminal, you can use the following batch
-file:
-
-.. code-block:: batch
-
-   setlocal
-
-   REM For 64bit
-   set MINGW_PACKAGE_PREFIX=mingw-w64-x86_64
-   set MINGW_PREFIX=c:\msys64\mingw64
-   set MSYSTEM=MINGW64
-
-   set PATH=%MINGW_PREFIX%\bin;c:\msys64\usr\bin;%PATH%
-
-   rmdir /S /Q cpp\build
-   mkdir cpp\build
-   pushd cpp\build
-   cmake -G "MSYS Makefiles" .. || exit /B
-   make || exit /B
-   popd
-
-Debug builds
-============
-
-To build a Debug version of Arrow, you should have pre-installed a Debug
-version of Boost. It's recommended to configure ``cmake`` with the following
-variables for a Debug build:
-
-* ``-DARROW_BOOST_USE_SHARED=OFF``: enables static linking with Boost debug
-  libraries and simplifies run-time loading of third-party libraries
-* ``-DBOOST_ROOT``: sets the root directory of boost libs. (Optional)
-* ``-DBOOST_LIBRARYDIR``: sets the directory with boost lib files. (Optional)
-
-The command line to build Arrow in Debug mode will look something like this:
-
-.. code-block:: shell
-
-   cd cpp
-   mkdir build
-   cd build
-   cmake .. -G "Visual Studio 14 2015" -A x64 ^
-         -DARROW_BOOST_USE_SHARED=OFF ^
-         -DCMAKE_BUILD_TYPE=Debug ^
-         -DBOOST_ROOT=C:/local/boost_1_63_0  ^
-         -DBOOST_LIBRARYDIR=C:/local/boost_1_63_0/lib64-msvc-14.0
-   cmake --build . --config Debug
-
-Windows dependency resolution issues
-====================================
-
-Because Windows uses ``.lib`` files for both static and dynamic linking of
-dependencies, a static library may sometimes be given a different name, such
-as ``%PACKAGE%_static.lib``, to distinguish it. If you are statically
-linking some dependencies, we provide the following options:
-
-* ``-DBROTLI_MSVC_STATIC_LIB_SUFFIX=%BROTLI_SUFFIX%``
-* ``-DSNAPPY_MSVC_STATIC_LIB_SUFFIX=%SNAPPY_SUFFIX%``
-* ``-DLZ4_MSVC_STATIC_LIB_SUFFIX=%LZ4_SUFFIX%``
-* ``-DZSTD_MSVC_STATIC_LIB_SUFFIX=%ZSTD_SUFFIX%``
-
-To get the latest build instructions, you can reference `ci/appveyor-cpp-build.bat
-<https://github.com/apache/arrow/blob/master/ci/appveyor-cpp-build.bat>`_,
-which is used by automated Appveyor builds.
-
-Statically linking to Arrow on Windows
-======================================
-
-On Windows, the Arrow headers of static library builds (enabled by the CMake
-option ``ARROW_BUILD_STATIC``) use the preprocessor macro ``ARROW_STATIC`` to
-suppress dllimport/dllexport marking of symbols. Projects that statically
-link against Arrow on Windows additionally need this definition. The Unix
-builds do not use the macro.
-
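-A minimal sketch of a consuming translation unit (the definition is more
-commonly passed on the compiler command line, e.g. ``/DARROW_STATIC`` with
-MSVC, than written in source):
-
-.. code-block:: cpp
-
-   // Define ARROW_STATIC before including any Arrow header (and in every
-   // translation unit) so symbols are not marked dllimport.
-   #define ARROW_STATIC
-
-   #include <arrow/api.h>
-
-   int main() {
-     arrow::Int64Builder builder;  // links against the static library
-     return 0;
-   }
-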
-Replicating Appveyor Builds
-===========================
-
-For people who are more familiar with Linux development but need to replicate a
-failing Appveyor build, here are some rough notes from replicating the
-``Static_Crt_Build`` job (``make unittest`` will probably still fail, but many
-unit tests can be built with their individual make targets).
-
-1. Microsoft offers trial VMs for `Windows with Microsoft Visual Studio
-   <https://developer.microsoft.com/en-us/windows/downloads/virtual-machines>`_.
-   Download and install a version.
-2. Run the VM and install `Git <https://git-scm.com/>`_, `CMake
-   <https://cmake.org/>`_, and Miniconda or Anaconda (these instructions assume
-   Anaconda). Also install the `"Build Tools for Visual Studio"
-   <https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019>`_.
-   Make sure to select the C++ toolchain in the installer wizard, and reboot
-   after installation.
-3. Download `pre-built Boost debug binaries
-   <https://sourceforge.net/projects/boost/files/boost-binaries/>`_ and install
-   them.
-
-   Run this from an Anaconda/Miniconda command prompt (*not* PowerShell prompt),
-   and make sure to run "vcvarsall.bat x64" first. The location of vcvarsall.bat
-   depends on your installation; it may be under a different path than commonly indicated,
-   e.g. "``C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC\Auxiliary\Build\vcvarsall.bat``"
-   with the 2019 build tools.
-
-.. code-block:: shell
-
-   cd $EXTRACT_BOOST_DIRECTORY
-   .\bootstrap.bat
-   @rem This is for static libraries needed for static_crt_build in appveyor
-   .\b2 link=static --with-filesystem --with-regex --with-system install
-   @rem this should put libraries and headers in c:\Boost
-
-4. Activate anaconda/miniconda:
-
-.. code-block:: shell
-
-   @rem this might differ for miniconda
-   C:\Users\User\Anaconda3\Scripts\activate
-
-5. Clone and change directories to the arrow source code (you might need to
-   install git).
-6. Set up environment variables:
-
-.. code-block:: shell
-
-   @rem Change the build type based on which appveyor job you want.
-   SET JOB=Static_Crt_Build
-   SET GENERATOR=Ninja
-   SET APPVEYOR_BUILD_WORKER_IMAGE=Visual Studio 2017
-   SET USE_CLCACHE=false
-   SET ARROW_BUILD_GANDIVA=OFF
-   SET ARROW_LLVM_VERSION=8.0.*
-   SET PYTHON=3.6
-   SET ARCH=64
-   SET PATH=C:\Users\User\Anaconda3;C:\Users\User\Anaconda3\Scripts;C:\Users\User\Anaconda3\Library\bin;%PATH%
-   SET BOOST_LIBRARYDIR=C:\Boost\lib
-   SET BOOST_ROOT=C:\Boost
-
-7. Run the Appveyor scripts:
-
-.. code-block:: shell
-
-   conda install -c conda-forge --file .\ci\conda_env_cpp.yml
-   .\ci\appveyor-cpp-setup.bat
-   @rem this might fail but at this point most unit tests should be buildable by their individual targets
-   @rem see next line for example.
-   .\ci\appveyor-cpp-build.bat
-   @rem you can also just invoke cmake directly with the desired options
-   cmake --build . --config Release --target arrow-compute-hash-test
diff --git a/docs/source/developers/crossbow.rst b/docs/source/developers/crossbow.rst
deleted file mode 100644
index 7d5a3ca..0000000
--- a/docs/source/developers/crossbow.rst
+++ /dev/null
@@ -1,257 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-Packaging and Testing with Crossbow
-===================================
-
-The content of the ``arrow/dev/tasks`` directory aims to automate the process
-of Arrow packaging and integration testing.
-
-Packages:
-  - C++ and Python `conda-forge packages`_ for Linux, Mac and Windows
-  - Python `Wheels`_ for Linux, Mac and Windows
-  - C++ and GLib `Linux packages`_ for multiple distributions
-  - Java for Gandiva
-
-Integration tests:
-  - Various docker tests
-  - Pandas
-  - Dask
-  - Turbodbc
-  - HDFS
-  - Spark
-
-Architecture
-------------
-
-Executors
-~~~~~~~~~
-
-Individual jobs are executed on public CI services, currently:
-
-- Linux: TravisCI, CircleCI, Azure Pipelines
-- Mac: TravisCI, Azure Pipelines
-- Windows: AppVeyor, Azure Pipelines
-
-Queue
-~~~~~
-
-Because of the nature of how the CI services work, the scheduling of
-jobs happens through an additional git repository, which acts like a job
-queue for the tasks. Anyone can host a ``queue`` repository, which is usually
-called ``crossbow``.
-
-A job is a git commit on a particular git branch, containing only the required
-configuration file to run the requested build (like ``.travis.yml``,
-``appveyor.yml`` or ``azure-pipelines.yml``).
-
-Scheduler
-~~~~~~~~~
-
-`Crossbow.py`_ handles version generation, task rendering and
-submission. The tasks are defined in ``tasks.yml``.
-
-Install
--------
-
-   The following guide depends on GitHub, but theoretically any git
-   server can be used.
-
-1. `Create the queue repository`_
-
-2. Enable `TravisCI`_, `Appveyor`_, `Azure Pipelines`_ and `CircleCI`_
-   integrations for the newly created queue repository.
-
-   -  turn off Travis’ `auto cancellation`_ feature on branches
-
-3. Clone the newly created repository next to the arrow repository:
-
-   By default the script looks for ``crossbow`` next to the arrow repository,
-   but this can be configured through command line arguments.
-
-   .. code:: bash
-
-      git clone https://github.com/<user>/crossbow crossbow
-
-   **Important note:** Crossbow only supports GitHub token based
-   authentication. Although it rewrites repository URLs provided with the SSH
-   protocol, it is advisable to use HTTPS repository URLs.
-
-4. `Create a Personal Access Token`_ with ``repo`` and ``workflow`` permissions (other
-   permissions are not needed)
-
-5. Locally export the token as an environment variable:
-
-   .. code:: bash
-
-      export CROSSBOW_GITHUB_TOKEN=<token>
-
-   ..
-
-      or pass it to the CLI script as the ``--github-token`` argument
-
-6. Export the previously created GitHub token on the CI services:
-
-   Use the ``CROSSBOW_GITHUB_TOKEN`` encrypted environment variable. You can
-   set them at the following URLs, where ``ghuser`` is the GitHub
-   username and ``ghrepo`` is the GitHub repository name (typically
-   ``crossbow``):
-
-   -  TravisCI: ``https://travis-ci.org/<ghuser>/<ghrepo>/settings``
-   -  Appveyor:
-      ``https://ci.appveyor.com/project/<ghuser>/<ghrepo>/settings/environment``
-   -  CircleCI:
-      ``https://circleci.com/gh/<ghuser>/<ghrepo>/edit#env-vars``
-
-   On Appveyor check the ``skip branches without appveyor.yml`` checkbox
-   on the web UI under the crossbow repository’s settings.
-
-7. Install Python (minimum supported version is 3.6):
-
-   Miniconda is preferred; see the installation instructions:
-   https://conda.io/docs/user-guide/install/index.html
-
-8. Install the archery toolset containing crossbow itself:
-
-   .. code:: bash
-
-      pip install -e arrow/dev/archery[crossbow]
-
-9. Try running it:
-
-   .. code:: bash
-
-      $ archery crossbow --help
-
-Usage
------
-
-The script does the following:
-
-1. Detects the current repository, thus supports forks. The following
-   snippet will build kszucs’s fork instead of the upstream apache/arrow
-   repository.
-
-   .. code:: bash
-
-      $ git clone https://github.com/kszucs/arrow
-      $ git clone https://github.com/kszucs/crossbow
-
-      $ cd arrow/dev/tasks
-      $ archery crossbow submit --help  # show the available options
-      $ archery crossbow submit conda-win conda-linux conda-osx
-
-2. Gets the HEAD commit of the currently checked out branch and
-   generates the version number based on `setuptools_scm`_. So to build
-   a particular branch, check it out before running the script:
-
-   .. code:: bash
-
-      git checkout ARROW-<ticket number>
-      archery crossbow submit --dry-run conda-linux conda-osx
-
-   ..
-
-      Note that the arrow branch must be pushed beforehand, because the
-      script will clone the selected branch.
-
-3. Reads and renders the required build configurations with the
-   parameters substituted.
-
-4. Creates a branch per task, prefixed with the job id. For example, to
-   build conda recipes on Linux, it will create a new branch:
-   ``crossbow@build-<id>-conda-linux``.
-
-5. Pushes the modified branches to GitHub, which triggers the builds. For
-   authentication it uses the GitHub OAuth tokens described in the install
-   section.
-
-Query the build status
-~~~~~~~~~~~~~~~~~~~~~~
-
-The build id (which has a corresponding branch in the queue repository) is
-returned by the ``submit`` command.
-
-.. code:: bash
-
-   archery crossbow status <build id / branch name>
-
-Download the build artifacts
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-.. code:: bash
-
-   archery crossbow artifacts <build id / branch name>
-
-Examples
-~~~~~~~~
-
-The submit command accepts a list of task names and/or a list of task-group
-names to select which tasks to build.
-
-Run multiple builds:
-
-.. code:: bash
-
-   $ archery crossbow submit debian-stretch conda-linux-gcc-py37-r40
-   Repository: https://github.com/kszucs/arrow@tasks
-   Commit SHA: 810a718836bb3a8cefc053055600bdcc440e6702
-   Version: 0.9.1.dev48+g810a7188.d20180414
-   Pushed branches:
-    - debian-stretch
-    - conda-linux-gcc-py37-r40
-
-Just render without applying or committing the changes:
-
-.. code:: bash
-
-   $ archery crossbow submit --dry-run task_name
-
-Run only ``conda`` package builds and a Linux one:
-
-.. code:: bash
-
-   $ archery crossbow submit --group conda centos-7
-
-Run ``wheel`` builds:
-
-.. code:: bash
-
-   $ archery crossbow submit --group wheel
-
-There are multiple task groups in ``tasks.yml``, such as docker, integration
-and cpp-python, for running docker-based tests.
-
-``archery crossbow submit`` supports multiple options and arguments; for more,
-see its help page:
-
-.. code:: bash
-
-  $ archery crossbow submit --help
-
-
-.. _conda-forge packages: conda-recipes
-.. _Wheels: python-wheels
-.. _Linux packages: linux-packages
-.. _Create the queue repository: https://help.github.com/articles/creating-a-new-repository
-.. _TravisCI: https://travis-ci.org/getting_started
-.. _Appveyor: https://www.appveyor.com/docs/
-.. _CircleCI: https://circleci.com/docs/2.0/getting-started/
-.. _Azure Pipelines: https://docs.microsoft.com/en-us/azure/devops/pipelines/get-started/pipelines-sign-up
-.. _auto cancellation: https://docs.travis-ci.com/user/customizing-the-build/#Building-only-the-latest-commit
-.. _Create a Personal Access Token: https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/
-.. _setuptools_scm: https://pypi.python.org/pypi/setuptools_scm
diff --git a/docs/source/developers/docker.rst b/docs/source/developers/docker.rst
deleted file mode 100644
index eaabad9..0000000
--- a/docs/source/developers/docker.rst
+++ /dev/null
@@ -1,225 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _docker-builds:
-
-Running Docker Builds
-=====================
-
-Most of our Linux-based Continuous Integration tasks are decoupled from public
-CI services using `Docker <https://docs.docker.com/>`_ and
-`docker-compose <https://docs.docker.com/compose/>`_.  Keeping the CI configuration
-minimal makes local reproducibility possible.
-
-Usage
------
-
-There are multiple ways to execute the docker-based builds.
-The recommended way is to use the :ref:`Archery <archery>` tool:
-
-Examples
-~~~~~~~~
-
-**List the available images:**
-
-.. code:: bash
-
-    archery docker images
-
-**Execute a build:**
-
-.. code:: bash
-
-    archery docker run conda-python
-
-Archery calls the following docker-compose commands:
-
-.. code:: bash
-
-    docker-compose pull --ignore-pull-failures conda-cpp
-    docker-compose pull --ignore-pull-failures conda-python
-    docker-compose build conda-cpp
-    docker-compose build conda-python
-    docker-compose run --rm conda-python
-
-**Show the docker-compose commands instead of executing them:**
-
-.. code:: bash
-
-    archery docker run --dry-run conda-python
-
-**To disable the image pulling:**
-
-.. code:: bash
-
-    archery docker run --no-cache conda-python
-
-Which translates to:
-
-.. code:: bash
-
-    docker-compose build --no-cache conda-cpp
-    docker-compose build --no-cache conda-python
-    docker-compose run --rm conda-python
-
-**To disable the cache only for the leaf image:**
-
-This is useful to force building the development version of a dependency.
-In the example below, the command builds the
-``conda-cpp > conda-python > conda-python-pandas`` branch of the image tree,
-where the leaf image is ``conda-python-pandas``.
-
-.. code:: bash
-
-    PANDAS=master archery docker run --no-leaf-cache conda-python-pandas
-
-Which translates to:
-
-.. code:: bash
-
-    export PANDAS=master
-    docker-compose pull --ignore-pull-failures conda-cpp
-    docker-compose pull --ignore-pull-failures conda-python
-    docker-compose build conda-cpp
-    docker-compose build conda-python
-    docker-compose build --no-cache conda-python-pandas
-    docker-compose run --rm conda-python-pandas
-
-Note that the command does not pull the conda-python-pandas image, and
-disables the cache when building it.
-
-``PANDAS`` is a `build parameter <Docker Build Parameters>`_; see the
-defaults in the ``.env`` file.
-
-**To entirely skip building the image:**
-
-The layer-caching mechanism of docker-compose can be less reliable than
-docker's, depending on the version, the ``cache_from`` build entry, and the
-backend used (docker-py, docker-cli, or docker-cli with buildkit). This can
-lead to different layer hashes - even when executing the same build command
-repeatedly - eventually causing cache misses and full image rebuilds.
-
-*If the image has already been built but the cache doesn't work properly*, it
-can be useful to skip the build phases:
-
-.. code:: bash
-
-    # first run ensures that the image is built
-    archery docker run conda-python
-
-    # if the second run tries to build the image again and none of the files
-    # referenced in the relevant dockerfile have changed, then it indicates a
-    # cache miss caused by the issue described above
-    archery docker run conda-python
-
-    # since the image is properly built with the first command, there is no
-    # need to rebuild it, so manually disable the pull and build phases to
-    # save some time
-    archery docker run --no-pull --no-build conda-python
-
-**Pass environment variables to the container:**
-
-Most of the build scripts used within the containers can be configured through
-environment variables. Pass them using ``--env`` or ``-e`` CLI options -
-similar to the ``docker run`` and ``docker-compose run`` interface.
-
-.. code:: bash
-
-    archery docker run --env CMAKE_BUILD_TYPE=release ubuntu-cpp
-
-For the available environment variables in the C++ builds see the
-``ci/scripts/cpp_build.sh`` script.
-
-**Run the image with a custom command:**
-
-Custom docker commands may be passed as the second argument to
-``archery docker run``.
-
-The following example starts an interactive ``bash`` session in the container
-- useful for debugging the build interactively:
-
-.. code:: bash
-
-    archery docker run ubuntu-cpp bash
-
-Docker Volume Caches
-~~~~~~~~~~~~~~~~~~~~
-
-Most of the compose containers have specific directories mounted from the host
-to reuse ``ccache`` and ``maven`` artifacts. These docker volumes are placed
-in the ``.docker`` directory.
-
-In order to clean up the cache, simply delete one or more directories (or the
-whole ``.docker`` directory).
-
-
-Development
------------
-
-The docker-compose configuration is tuned towards reusable development
-containers using hierarchical images. For example, multiple language bindings
-depend on the C++ implementation, so instead of redefining the C++ environment
-in multiple Dockerfiles, we can reuse the exact same base C++
-image when building Glib, Ruby, R and Python bindings.
-This reduces duplication and streamlines maintenance, but makes the
-docker-compose configuration more complicated.
-
-Docker Build Parameters
-~~~~~~~~~~~~~~~~~~~~~~~
-
-The build-time parameters are pushed down to the dockerfiles to make the
-image building more flexible. These parameters are usually called docker
-build args, but we pass these values as environment variables to
-docker-compose.yml. The build parameters are extensively used for:
-
-- defining the docker registry used for caching
-- platform architectures
-- operating systems and versions
-- defining various versions of dependencies
-
-The default parameter values are stored in the top level .env file.
-For detailed examples see the docker-compose.yml.
-
-Build Scripts
-~~~~~~~~~~~~~
-
-The scripts maintained under the ci/scripts directory should be kept
-parametrizable but reasonably minimal, clearly encapsulating the tasks they
-are responsible for. For example:
-
-- ``cpp_build.sh``: build the C++ implementation without running the tests.
-- ``cpp_test.sh``: execute the C++ tests.
-- ``python_build.sh``: build the Python bindings without running the tests.
-- ``python_test.sh``: execute the python tests.
-- ``docs_build.sh``: build the Sphinx documentation.
-- ``integration_dask.sh``: execute the dask integration tests.
-- ``integration_pandas.sh``: execute the pandas integration tests.
-- ``install_minio.sh``: install minio server for multiple platforms.
-- ``install_conda.sh``: install miniconda for multiple platforms.
-
-The parametrization (like the C++ CMake options) is achieved via environment
-variables with useful defaults to keep the build configurations declarative.
-
-A good example is the ``cpp_build.sh`` build script, which forwards environment
-variables as CMake options so that the same script can be invoked in various
-configurations without changing it. For examples, see how the environment
-variables are passed to the C++ images in docker-compose.yml.
-
-Adding New Images
-~~~~~~~~~~~~~~~~~
-
-See the inline comments available in the docker-compose.yml file.
diff --git a/docs/source/developers/documentation.rst b/docs/source/developers/documentation.rst
deleted file mode 100644
index 4196a0c..0000000
--- a/docs/source/developers/documentation.rst
+++ /dev/null
@@ -1,103 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _building-docs:
-
-Building the Documentation
-==========================
-
-Prerequisites
--------------
-
-The documentation build process uses `Doxygen <http://www.doxygen.nl/>`_ and
-`Sphinx <http://www.sphinx-doc.org/>`_ along with a few extensions.
-
-If you're using Conda, the required software can be installed in a single line:
-
-.. code-block:: shell
-
-   conda install -c conda-forge --file=ci/conda_env_sphinx.yml
-
-Otherwise, you'll first need to install `Doxygen <http://www.doxygen.nl/>`_
-yourself (for example from your distribution's official repositories, if
-using Linux).  Then you can install the Python-based requirements with the
-following command:
-
-.. code-block:: shell
-
-   pip install -r docs/requirements.txt
-
-Building
---------
-
-.. note::
-
-   If you are building the documentation on Windows, not all sections
-   may build properly.
-
-These two steps are mandatory and must be executed in order.
-
-#. Process the C++ API using Doxygen
-
-   .. code-block:: shell
-
-      pushd cpp/apidoc
-      doxygen
-      popd
-
-#. Build the complete documentation using Sphinx.
-
-   .. note::
-
-      This step requires that the pyarrow library is installed
-      in your python environment.  One way to accomplish
-      this is to follow the build instructions at :ref:`python-development`
-      and then run ``python setup.py install`` in arrow/python
-      (it is best to do this in a dedicated conda/virtual environment).
-
-   .. code-block:: shell
-
-      pushd docs
-      make html
-      popd
-
-.. note::
-
-   Note that building the documentation may fail if your build of pyarrow is
-   not sufficiently comprehensive. Portions of the Python API documentation
-   will also not build without CUDA support having been built.
-
-After these steps are completed, the documentation is rendered in HTML
-format in ``docs/_build/html``.  In particular, you can point your browser
-at ``docs/_build/html/index.html`` to read the docs and review any changes
-you made.
-
-Building with Docker
---------------------
-
-You can use :ref:`Archery <archery>` to build the documentation within a
-Docker container.
-
-.. code-block:: shell
-
-  archery docker run ubuntu-docs
-
-The final output is located under ``docs/_build/html``.
-
-.. seealso::
-
-   :ref:`docker-builds`.
diff --git a/docs/source/developers/python.rst b/docs/source/developers/python.rst
deleted file mode 100644
index d1fe086..0000000
--- a/docs/source/developers/python.rst
+++ /dev/null
@@ -1,575 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. currentmodule:: pyarrow
-.. _python-development:
-
-==================
-Python Development
-==================
-
-This page provides general Python development guidelines and source build
-instructions for all platforms.
-
-Coding Style
-============
-
-We follow a PEP8-like coding style similar to that of the `pandas project
-<https://github.com/pandas-dev/pandas>`_.  To check style issues, use the
-:ref:`Archery <archery>` subcommand ``lint``:
-
-.. code-block:: shell
-
-   pip install -e arrow/dev/archery
-   pip install -r arrow/dev/archery/requirements-lint.txt
-
-.. code-block:: shell
-
-   archery lint --python
-
-Some of the issues can be automatically fixed by passing the ``--fix`` option:
-
-.. code-block:: shell
-
-   archery lint --python --fix
-
-Unit Testing
-============
-
-We are using `pytest <https://docs.pytest.org/en/latest/>`_ to develop our unit
-test suite. After building the project (see below) you can run its unit tests
-like so:
-
-.. code-block:: shell
-
-   pytest pyarrow
-
-Package requirements to run the unit tests are found in
-``requirements-test.txt`` and can be installed if needed with ``pip install -r
-requirements-test.txt``.
-
-The project has a number of custom command line options for its test
-suite. Some tests are disabled by default, for example. To see all the options,
-run
-
-.. code-block:: shell
-
-   pytest pyarrow --help
-
-and look for the "custom options" section.
-
-Test Groups
------------
-
-We have many tests that are grouped together using pytest marks. Some of these
-are disabled by default. To enable a test group, pass ``--$GROUP_NAME``,
-e.g. ``--parquet``. To disable a test group, prepend ``disable``, so
-``--disable-parquet`` for example. To run **only** the unit tests for a
-particular group, prepend ``only-`` instead, for example ``--only-parquet``.
-
-The test groups currently include:
-
-* ``gandiva``: tests for Gandiva expression compiler (uses LLVM)
-* ``hdfs``: tests that use libhdfs or libhdfs3 to access the Hadoop filesystem
-* ``hypothesis``: tests that use the ``hypothesis`` module for generating
-  random test cases. Note that ``--hypothesis`` doesn't work due to a quirk
-  with pytest, so you have to pass ``--enable-hypothesis``
-* ``large_memory``: Tests requiring a large amount of system RAM
-* ``orc``: Apache ORC tests
-* ``parquet``: Apache Parquet tests
-* ``plasma``: Plasma Object Store tests
-* ``s3``: Tests for Amazon S3
-* ``tensorflow``: Tests that involve TensorFlow
-* ``flight``: Flight RPC tests
-
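-For example, combining the options above (a usage sketch using only the
-flags documented in this section):
-
-.. code-block:: shell
-
-   pytest pyarrow --parquet             # enable the Parquet test group
-   pytest pyarrow --disable-parquet     # disable the Parquet test group
-   pytest pyarrow --only-parquet        # run only the Parquet test group
-   pytest pyarrow --enable-hypothesis   # hypothesis needs the long form
-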
-Benchmarking
-------------
-
-For running the benchmarks, see :ref:`python-benchmarks`.
-
-Building on Linux and macOS
-===========================
-
-System Requirements
--------------------
-
-On macOS, any modern Xcode (6.4 or higher; the current version is 10) is
-sufficient.
-
-On Linux, for this guide, we require gcc 4.8 or higher, or clang 3.7 or
-higher. You can check your version by running
-
-.. code-block:: shell
-
-   $ gcc --version
-
-If the system compiler is older than gcc 4.8, it can be set to a newer version
-using the ``$CC`` and ``$CXX`` environment variables:
-
-.. code-block:: shell
-
-   export CC=gcc-4.8
-   export CXX=g++-4.8
-
-Environment Setup and Build
----------------------------
-
-First, let's clone the Arrow git repository:
-
-.. code-block:: shell
-
-   mkdir repos
-   cd repos
-   git clone https://github.com/apache/arrow.git
-
-You should now see
-
-.. code-block:: shell
-
-   $ ls -l
-   total 8
-   drwxrwxr-x 12 wesm wesm 4096 Apr 15 19:19 arrow/
-
-Pull in the test data and setup the environment variables:
-
-.. code-block:: shell
-
-   pushd arrow
-   git submodule init
-   git submodule update
-   export PARQUET_TEST_DATA="${PWD}/cpp/submodules/parquet-testing/data"
-   export ARROW_TEST_DATA="${PWD}/testing/data"
-   popd
-
-Using Conda
-~~~~~~~~~~~
-
-.. note::
-
-   Using conda to build Arrow on macOS is complicated by the
-   fact that the `conda-forge compilers require an older macOS SDK <https://stackoverflow.com/a/55798942>`_.
-   Conda offers some `installation instructions <https://docs.conda.io/projects/conda-build/en/latest/resources/compiler-tools.html#macos-sdk>`_;
-   the alternative would be to use :ref:`Homebrew <python-homebrew>` and
-   ``pip`` instead.
-
-Let's create a conda environment with all the C++ build and Python dependencies
-from conda-forge, targeting development for Python 3.7:
-
-On Linux and macOS:
-
-.. code-block:: shell
-
-    conda create -y -n pyarrow-dev -c conda-forge \
-        --file arrow/ci/conda_env_unix.yml \
-        --file arrow/ci/conda_env_cpp.yml \
-        --file arrow/ci/conda_env_python.yml \
-        --file arrow/ci/conda_env_gandiva.yml \
-        compilers \
-        python=3.7 \
-        pandas
-
-As of January 2019, the ``compilers`` package is needed on many Linux
-distributions to use packages from conda-forge.
-
-With this out of the way, you can now activate the conda environment
-
-.. code-block:: shell
-
-   conda activate pyarrow-dev
-
-For Windows, see the `Building on Windows`_ section below.
-
-We need to set some environment variables to let Arrow's build system know
-about our build toolchain:
-
-.. code-block:: shell
-
-   export ARROW_HOME=$CONDA_PREFIX
-
-Using pip
-~~~~~~~~~
-
-.. warning::
-
-   If you installed Python using the Anaconda distribution or `Miniconda
-   <https://conda.io/miniconda.html>`_, you cannot currently use ``virtualenv``
-   to manage your development. Please follow the conda-based development
-   instructions instead.
-
-.. _python-homebrew:
-
-On macOS, use Homebrew to install all dependencies required for
-building Arrow C++:
-
-.. code-block:: shell
-
-   brew update && brew bundle --file=arrow/cpp/Brewfile
-
-See :ref:`here <cpp-build-dependency-management>` for a list of dependencies you
-may need.
-
-On Debian/Ubuntu, you need the following minimal set of dependencies. All other
-dependencies will be automatically built by Arrow's third-party toolchain.
-
-.. code-block:: shell
-
-   $ sudo apt-get install libjemalloc-dev libboost-dev \
-                          libboost-filesystem-dev \
-                          libboost-system-dev \
-                          libboost-regex-dev \
-                          python-dev \
-                          autoconf \
-                          flex \
-                          bison
-
-If you are building Arrow for Python 3, install ``python3-dev`` instead of ``python-dev``.
-
-On Arch Linux, you can get these dependencies via pacman.
-
-.. code-block:: shell
-
-   $ sudo pacman -S jemalloc boost
-
-Now, let's create a Python virtualenv with all Python dependencies in the same
-folder as the repositories and a target installation folder:
-
-.. code-block:: shell
-
-   virtualenv pyarrow
-   source ./pyarrow/bin/activate
-   pip install -r arrow/python/requirements-build.txt \
-        -r arrow/python/requirements-test.txt
-
-   # This is the folder where we will install the Arrow libraries during
-   # development
-   mkdir dist
-
-If your cmake version is too old on Linux, you could get a newer one via
-``pip install cmake``.
-
-We need to set some environment variables to let Arrow's build system know
-about our build toolchain:
-
-.. code-block:: shell
-
-   export ARROW_HOME=$(pwd)/dist
-   export LD_LIBRARY_PATH=$(pwd)/dist/lib:$LD_LIBRARY_PATH
-
-Build and test
---------------
-
-Now build and install the Arrow C++ libraries:
-
-.. code-block:: shell
-
-   mkdir arrow/cpp/build
-   pushd arrow/cpp/build
-
-   cmake -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \
-         -DCMAKE_INSTALL_LIBDIR=lib \
-         -DARROW_WITH_BZ2=ON \
-         -DARROW_WITH_ZLIB=ON \
-         -DARROW_WITH_ZSTD=ON \
-         -DARROW_WITH_LZ4=ON \
-         -DARROW_WITH_SNAPPY=ON \
-         -DARROW_WITH_BROTLI=ON \
-         -DARROW_PARQUET=ON \
-         -DARROW_PYTHON=ON \
-         -DARROW_BUILD_TESTS=ON \
-         ..
-   make -j4
-   make install
-   popd
-
-There are a number of optional components that can be switched ON by
-adding flags with ``ON``:
-
-* ``ARROW_FLIGHT``: RPC framework
-* ``ARROW_GANDIVA``: LLVM-based expression compiler
-* ``ARROW_ORC``: Support for Apache ORC file format
-* ``ARROW_PARQUET``: Support for Apache Parquet file format
-* ``ARROW_PLASMA``: Shared memory object store
-
-Anything set to ``ON`` above can also be turned off. Note that some compression
-libraries are needed for Parquet support.
-
-If multiple versions of Python are installed in your environment, you may have
-to pass additional parameters to cmake so that it can find the right
-executable, headers and libraries.  For example, specifying
-``-DPython3_EXECUTABLE=$VIRTUAL_ENV/bin/python`` (assuming that you're in
-virtualenv) enables cmake to choose the python executable which you are using.
-
-.. note::
-
-   On Linux systems with support for building on multiple architectures,
-   ``make`` may install libraries in the ``lib64`` directory by default. For
-   this reason we recommend passing ``-DCMAKE_INSTALL_LIBDIR=lib`` because the
-   Python build scripts assume the library directory is ``lib``.
-
-.. note::
-
-   If you have conda installed but are not using it to manage dependencies,
-   and you have trouble building the C++ library, you may need to set
-   ``-DARROW_DEPENDENCY_SOURCE=AUTO`` or some other value (described
-   :ref:`here <cpp-build-dependency-management>`)
-   to explicitly tell CMake not to use conda.
-
-.. note::
-
-   With older versions of ``cmake`` (<3.15) you might need to pass ``-DPYTHON_EXECUTABLE``
-   instead of ``-DPython3_EXECUTABLE``. See the `cmake documentation
-   <https://cmake.org/cmake/help/latest/module/FindPython3.html#artifacts-specification>`_
-   for more details.
-
-For any other C++ build challenges, see :ref:`cpp-development`.
-
-Now, build pyarrow:
-
-.. code-block:: shell
-
-   pushd arrow/python
-   export PYARROW_WITH_PARQUET=1
-   python setup.py build_ext --inplace
-   popd
-
-If you did not build one of the optional components, set the corresponding
-``PYARROW_WITH_$COMPONENT`` environment variable to 0.
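-
-For instance, if the Flight and Gandiva components were skipped in the C++
-build, one might set (an illustrative sketch; substitute whichever
-components you disabled):
-
-.. code-block:: shell
-
-   export PYARROW_WITH_FLIGHT=0
-   export PYARROW_WITH_GANDIVA=0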
-
-Now you are ready to install test dependencies and run `Unit Testing`_, as
-described above.
-
-To build a self-contained wheel (including the Arrow and Parquet C++
-libraries), one can set ``--bundle-arrow-cpp``:
-
-.. code-block:: shell
-
-   pip install wheel  # if not installed
-   python setup.py build_ext --build-type=$ARROW_BUILD_TYPE \
-          --bundle-arrow-cpp bdist_wheel
-
-Docker examples
-~~~~~~~~~~~~~~~
-
-If you are having difficulty building the Python library from source, take a
-look at the ``python/examples/minimal_build`` directory which illustrates a
-complete build and test from source both with the conda and pip/virtualenv
-build methods.
-
-Building with CUDA support
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The :mod:`pyarrow.cuda` module offers support for using Arrow platform
-components with Nvidia's CUDA-enabled GPU devices. To build with this support,
-pass ``-DARROW_CUDA=ON`` when building the C++ libraries, and set the following
-environment variable when building pyarrow:
-
-.. code-block:: shell
-
-   export PYARROW_WITH_CUDA=1
-
-Debugging
----------
-
-Since pyarrow depends on the Arrow C++ libraries, debugging can
-frequently involve crossing between Python and C++ shared libraries.
-
-Using gdb on Linux
-~~~~~~~~~~~~~~~~~~
-
-To debug the C++ libraries with gdb while running the Python unit
-tests, first start pytest with gdb:
-
-.. code-block:: shell
-
-   gdb --args python -m pytest pyarrow/tests/test_to_run.py -k $TEST_TO_MATCH
-
-To set a breakpoint, use the same gdb syntax that you would when
-debugging a C++ unittest, for example:
-
-.. code-block:: shell
-
-   (gdb) b src/arrow/python/arrow_to_pandas.cc:1874
-   No source file named src/arrow/python/arrow_to_pandas.cc.
-   Make breakpoint pending on future shared library load? (y or [n]) y
-   Breakpoint 1 (src/arrow/python/arrow_to_pandas.cc:1874) pending.
-
-Building on Windows
-===================
-
-Building on Windows requires one of the following compilers to be installed:
-
-- `Build Tools for Visual Studio 2017 <https://download.visualstudio.microsoft.com/download/pr/3e542575-929e-4297-b6c6-bef34d0ee648/639c868e1219c651793aff537a1d3b77/vs_buildtools.exe>`_
-- `Microsoft Build Tools 2015 <http://download.microsoft.com/download/5/F/7/5F7ACAEB-8363-451F-9425-68A90F98B238/visualcppbuildtools_full.exe>`_
-- Visual Studio 2015
-- Visual Studio 2017
-
-During the setup of Build Tools, ensure at least one Windows SDK is selected.
-
-Visual Studio 2019 and its build tools are currently not supported.
-
-We bootstrap a conda environment similar to above, but skipping some of the
-Linux/macOS-only packages:
-
-First, starting from fresh clones of Apache Arrow:
-
-.. code-block:: shell
-
-   git clone https://github.com/apache/arrow.git
-
-.. code-block:: shell
-
-   conda create -y -n pyarrow-dev -c conda-forge ^
-       --file arrow\ci\conda_env_cpp.yml ^
-       --file arrow\ci\conda_env_python.yml ^
-       --file arrow\ci\conda_env_gandiva.yml ^
-       python=3.7
-   conda activate pyarrow-dev
-
-Now, we build and install the Arrow C++ libraries.
-
-We set a number of environment variables:
-
-- set ``ARROW_HOME`` to the installation directory of the Arrow C++
-  libraries,
-- add the path of the installed DLL libraries to ``PATH``,
-- and choose the compiler to be used.
-
-.. code-block:: shell
-
-   set ARROW_HOME=%cd%\arrow-dist
-   set PATH=%ARROW_HOME%\bin;%PATH%
-   set PYARROW_CMAKE_GENERATOR=Visual Studio 15 2017 Win64
-
-This assumes Visual Studio 2017 or its build tools are used. For Visual Studio
-2015 and its build tools, use the following instead:
-
-.. code-block:: shell
-
-   set PYARROW_CMAKE_GENERATOR=Visual Studio 14 2015 Win64
-
-Let's configure, build and install the Arrow C++ libraries:
-
-.. code-block:: shell
-
-   mkdir arrow\cpp\build
-   pushd arrow\cpp\build
-   cmake -G "%PYARROW_CMAKE_GENERATOR%" ^
-       -DCMAKE_INSTALL_PREFIX=%ARROW_HOME% ^
-       -DCMAKE_UNITY_BUILD=ON ^
-       -DARROW_CXXFLAGS="/WX /MP" ^
-       -DARROW_WITH_LZ4=on ^
-       -DARROW_WITH_SNAPPY=on ^
-       -DARROW_WITH_ZLIB=on ^
-       -DARROW_WITH_ZSTD=on ^
-       -DARROW_PARQUET=on ^
-       -DARROW_PYTHON=on ^
-       ..
-   cmake --build . --target INSTALL --config Release
-   popd
-
-Now, we can build pyarrow:
-
-.. code-block:: shell
-
-   pushd arrow\python
-   set PYARROW_WITH_PARQUET=1
-   python setup.py build_ext --inplace
-   popd
-
-.. note::
-
-   For building pyarrow, the environment variables defined above also need
-   to be set. Remember this if you want to re-build ``pyarrow`` after your
-   initial build.
-
-Then run the unit tests with:
-
-.. code-block:: shell
-
-   pushd arrow\python
-   py.test pyarrow -v
-   popd
-
-.. note::
-
-   With the above instructions, the Arrow C++ libraries are not bundled with
-   the Python extension. This is recommended for development as it allows the
-   C++ libraries to be re-built separately.
-
-   As a consequence, however, ``python setup.py install`` will also not install
-   the Arrow C++ libraries. Therefore, to use ``pyarrow`` in Python, ``PATH``
-   must contain the directory with the Arrow ``.dll`` files.
-
-   If you want to bundle the Arrow C++ libraries with ``pyarrow`` add
-   ``--bundle-arrow-cpp`` as build parameter:
-
-   ``python setup.py build_ext --bundle-arrow-cpp``
-
-   Important: If you combine ``--bundle-arrow-cpp`` with ``--inplace`` the
-   Arrow C++ libraries get copied to the python source tree and are not cleared
-   by ``python setup.py clean``. They remain in place and will take precedence
-   over any later Arrow C++ libraries contained in ``PATH``. This can lead to
-   incompatibilities when ``pyarrow`` is later built without
-   ``--bundle-arrow-cpp``.
-
-Running C++ unit tests for Python integration
----------------------------------------------
-
-Running C++ unit tests should not be necessary for most developers. If you do
-want to run them, you need to pass ``-DARROW_BUILD_TESTS=ON`` during
-configuration of the Arrow C++ library build:
-
-.. code-block:: shell
-
-   mkdir arrow\cpp\build
-   pushd arrow\cpp\build
-   cmake -G "%PYARROW_CMAKE_GENERATOR%" ^
-       -DCMAKE_INSTALL_PREFIX=%ARROW_HOME% ^
-       -DARROW_CXXFLAGS="/WX /MP" ^
-       -DARROW_PARQUET=on ^
-       -DARROW_PYTHON=on ^
-       -DARROW_BUILD_TESTS=ON ^
-       ..
-   cmake --build . --target INSTALL --config Release
-   popd
-
-
-Getting ``arrow-python-test.exe`` (C++ unit tests for Python integration) to
-run is a bit tricky because your ``%PYTHONHOME%`` must be configured to point
-to the active conda environment:
-
-.. code-block:: shell
-
-   set PYTHONHOME=%CONDA_PREFIX%
-   pushd arrow\cpp\build\release\Release
-   arrow-python-test.exe
-   popd
-
-To run all tests of the Arrow C++ library, you can also run ``ctest``:
-
-.. code-block:: shell
-
-   set PYTHONHOME=%CONDA_PREFIX%
-   pushd arrow\cpp\build
-   ctest
-   popd
-
-Windows Caveats
----------------
-
-Some components are not supported yet on Windows:
-
-* Flight RPC
-* Plasma
diff --git a/docs/source/example.gz b/docs/source/example.gz
deleted file mode 100644
index 4fc6040..0000000
Binary files a/docs/source/example.gz and /dev/null differ
diff --git a/docs/source/format/Arrow.graffle b/docs/source/format/Arrow.graffle
deleted file mode 100644
index f4eead9..0000000
Binary files a/docs/source/format/Arrow.graffle and /dev/null differ
diff --git a/docs/source/format/Arrow.png b/docs/source/format/Arrow.png
deleted file mode 100644
index 1b09aa2..0000000
Binary files a/docs/source/format/Arrow.png and /dev/null differ
diff --git a/docs/source/format/CDataInterface.rst b/docs/source/format/CDataInterface.rst
deleted file mode 100644
index e4ba009..0000000
--- a/docs/source/format/CDataInterface.rst
+++ /dev/null
@@ -1,945 +0,0 @@
-.. Licensed to the Apache Software Foundation (ASF) under one
-.. or more contributor license agreements.  See the NOTICE file
-.. distributed with this work for additional information
-.. regarding copyright ownership.  The ASF licenses this file
-.. to you under the Apache License, Version 2.0 (the
-.. "License"); you may not use this file except in compliance
-.. with the License.  You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-.. software distributed under the License is distributed on an
-.. "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-.. KIND, either express or implied.  See the License for the
-.. specific language governing permissions and limitations
-.. under the License.
-
-.. _c-data-interface:
-
-==========================
-The Arrow C data interface
-==========================
-
-Rationale
-=========
-
-Apache Arrow is designed to be a universal in-memory format for the representation
-of tabular ("columnar") data. However, some projects may face a difficult
-choice: either depend on a fast-evolving project such as the Arrow C++
-library, or reimplement adapters for data interchange, which may require
-significant, redundant development effort.
-
-The Arrow C data interface defines a very small, stable set of C definitions
-that can be easily *copied* into any project's source code and used for columnar
-data interchange in the Arrow format.  For non-C/C++ languages and runtimes,
-it should be almost as easy to translate the C definitions into the
-corresponding C FFI declarations.
-
-Applications and libraries can therefore work with Arrow memory without
-necessarily using Arrow libraries or reinventing the wheel. Developers can
-choose between tight integration
-with the Arrow *software project* (benefitting from the growing array of
-facilities exposed by e.g. the C++ or Java implementations of Apache Arrow,
-but with the cost of a dependency) or minimal integration with the Arrow
-*format* only.
-
-Goals
------
-
-* Expose an ABI-stable interface.
-* Make it easy for third-party projects to implement support for it (including
-  partial support where sufficient), with little initial investment.
-* Allow zero-copy sharing of Arrow data between independent runtimes
-  and components running in the same process.
-* Match the Arrow array concepts closely to avoid the development of
-  yet another marshalling layer.
-* Avoid the need for one-to-one adaptation layers such as the limited
-  JPype-based bridge between Java and Python.
-* Enable integration without an explicit dependency (either at compile-time
-  or runtime) on the Arrow software project.
-
-Ideally, the Arrow C data interface can become a low-level *lingua franca*
-for sharing columnar data at runtime and establish Arrow as the universal
-building block in the columnar processing ecosystem.
-
-Non-goals
----------
-
-* Expose a C API mimicking operations available in higher-level runtimes
-  (such as C++, Java...).
-* Data sharing between distinct processes or storage persistence.
-
-
-Comparison with the Arrow IPC format
-------------------------------------
-
-Pros of the C data interface vs. the IPC format:
-
-* No dependency on Flatbuffers.
-* No buffer reassembly (data is already exposed in logical Arrow format).
-* Zero-copy by design.
-* Easy to reimplement from scratch.
-* Minimal C definition that can be easily copied into other codebases.
-* Resource lifetime management through a custom release callback.
-
-Pros of the IPC format vs. the data interface:
-
-* Works across processes and machines.
-* Allows data storage and persistence.
-* Being a streamable format, the IPC format has room for composing more features
-  (such as integrity checks, compression...).
-* Does not require explicit C data access.
-
-Data type description -- format strings
-=======================================
-
-A data type is described using a format string.  The format string only
-encodes information about the top-level type; for nested types, child types
-are described separately.  Also, metadata is encoded in a separate string.
-
-The format strings are designed to be easily parsable, even from a language
-such as C.  The most common primitive formats have one-character format
-strings:
-
-+-----------------+--------------------------+------------+
-| Format string   | Arrow data type          | Notes      |
-+=================+==========================+============+
-| ``n``           | null                     |            |
-+-----------------+--------------------------+------------+
-| ``b``           | boolean                  |            |
-+-----------------+--------------------------+------------+
-| ``c``           | int8                     |            |
-+-----------------+--------------------------+------------+
-| ``C``           | uint8                    |            |
-+-----------------+--------------------------+------------+
-| ``s``           | int16                    |            |
-+-----------------+--------------------------+------------+
-| ``S``           | uint16                   |            |
-+-----------------+--------------------------+------------+
-| ``i``           | int32                    |            |
-+-----------------+--------------------------+------------+
-| ``I``           | uint32                   |            |
-+-----------------+--------------------------+------------+
-| ``l``           | int64                    |            |
-+-----------------+--------------------------+------------+
-| ``L``           | uint64                   |            |
-+-----------------+--------------------------+------------+
-| ``e``           | float16                  |            |
-+-----------------+--------------------------+------------+
-| ``f``           | float32                  |            |
-+-----------------+--------------------------+------------+
-| ``g``           | float64                  |            |
-+-----------------+--------------------------+------------+
-
-+-----------------+---------------------------------------------------+------------+
-| Format string   | Arrow data type                                   | Notes      |
-+=================+===================================================+============+
-| ``z``           | binary                                            |            |
-+-----------------+---------------------------------------------------+------------+
-| ``Z``           | large binary                                      |            |
-+-----------------+---------------------------------------------------+------------+
-| ``u``           | utf-8 string                                      |            |
-+-----------------+---------------------------------------------------+------------+
-| ``U``           | large utf-8 string                                |            |
-+-----------------+---------------------------------------------------+------------+
-| ``d:19,10``     | decimal128 [precision 19, scale 10]               |            |
-+-----------------+---------------------------------------------------+------------+
-| ``d:19,10,NNN`` | decimal bitwidth = NNN [precision 19, scale 10]   |            |
-+-----------------+---------------------------------------------------+------------+
-| ``w:42``        | fixed-width binary [42 bytes]                     |            |
-+-----------------+---------------------------------------------------+------------+
-
-Temporal types have multi-character format strings starting with ``t``:
-
-+-----------------+---------------------------------------------------+------------+
-| Format string   | Arrow data type                                   | Notes      |
-+=================+===================================================+============+
-| ``tdD``         | date32 [days]                                     |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tdm``         | date64 [milliseconds]                             |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tts``         | time32 [seconds]                                  |            |
-+-----------------+---------------------------------------------------+------------+
-| ``ttm``         | time32 [milliseconds]                             |            |
-+-----------------+---------------------------------------------------+------------+
-| ``ttu``         | time64 [microseconds]                             |            |
-+-----------------+---------------------------------------------------+------------+
-| ``ttn``         | time64 [nanoseconds]                              |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tss:...``     | timestamp [seconds] with timezone "..."           | \(1)       |
-+-----------------+---------------------------------------------------+------------+
-| ``tsm:...``     | timestamp [milliseconds] with timezone "..."      | \(1)       |
-+-----------------+---------------------------------------------------+------------+
-| ``tsu:...``     | timestamp [microseconds] with timezone "..."      | \(1)       |
-+-----------------+---------------------------------------------------+------------+
-| ``tsn:...``     | timestamp [nanoseconds] with timezone "..."       | \(1)       |
-+-----------------+---------------------------------------------------+------------+
-| ``tDs``         | duration [seconds]                                |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tDm``         | duration [milliseconds]                           |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tDu``         | duration [microseconds]                           |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tDn``         | duration [nanoseconds]                            |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tiM``         | interval [months]                                 |            |
-+-----------------+---------------------------------------------------+------------+
-| ``tiD``         | interval [days, time]                             |            |
-+-----------------+---------------------------------------------------+------------+
-
-Dictionary-encoded types do not have a specific format string.  Instead, the
-format string of the base array represents the dictionary index type, and the
-value type can be read from the dependent dictionary array (see below
-"Dictionary-encoded arrays").
-
-Nested types have multi-character format strings starting with ``+``.  The
-names and types of child fields are read from the child arrays.
-
-+------------------------+---------------------------------------------------+------------+
-| Format string          | Arrow data type                                   | Notes      |
-+========================+===================================================+============+
-| ``+l``                 | list                                              |            |
-+------------------------+---------------------------------------------------+------------+
-| ``+L``                 | large list                                        |            |
-+------------------------+---------------------------------------------------+------------+
-| ``+w:123``             | fixed-size list [123 items]                       |            |
-+------------------------+---------------------------------------------------+------------+
-| ``+s``                 | struct                                            |            |
-+------------------------+---------------------------------------------------+------------+
-| ``+m``                 | map                                               | \(2)       |
-+------------------------+---------------------------------------------------+------------+
-| ``+ud:I,J,...``        | dense union with type ids I,J...                  |            |
-+------------------------+---------------------------------------------------+------------+
-| ``+us:I,J,...``        | sparse union with type ids I,J...                 |            |
-+------------------------+---------------------------------------------------+------------+
-
-Notes:
-
-(1)
-   The timezone string is appended as-is after the colon character ``:``, without
-   any quotes.  If the timezone is empty, the colon ``:`` must still be included.
-
-(2)
-   As specified in the Arrow columnar format, the map type has a single child type
-   named ``entries``, itself a 2-child struct type of ``(key, value)``.
-
-Examples
---------
-
-* A dictionary-encoded ``decimal128(precision = 12, scale = 5)`` array
-  with ``int16`` indices has format string ``s``, and its dependent dictionary
-  array has format string ``d:12,5``.
-* A ``list<uint64>`` array has format string ``+l``, and its single child
-  has format string ``L``.
-* A ``struct<ints: int32, floats: float32>`` has format string ``+s``; its two
-  children have names ``ints`` and ``floats``, and format strings ``i`` and
-  ``f`` respectively.
-* A ``map<string, float64>`` array has format string ``+m``; its single child
-  has name ``entries`` and format string ``+s``; its two grandchildren have names
-  ``key`` and ``value``, and format strings ``u`` and ``g`` respectively.
-* A ``sparse_union<ints: int32, floats: float32>`` with type ids ``4, 5``
-  has format string ``+us:4,5``; its two children have names ``ints`` and
-  ``floats``, and format strings ``i`` and ``f`` respectively.
-
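-Because format strings are short and flat, a consumer can classify them with
-ordinary character tests.  A minimal sketch in C (the helper name is
-illustrative, not part of the interface):
-
-.. code-block:: c
-
-   #include <string.h>
-
-   /* Coarsely classify a format string from the tables above. */
-   const char* classify_format(const char* format) {
-     if (strcmp(format, "i") == 0) return "int32";
-     if (strcmp(format, "u") == 0) return "utf-8 string";
-     if (format[0] == 'd') return "decimal";     /* e.g. "d:19,10" */
-     if (format[0] == 't') return "temporal";    /* e.g. "tss:UTC" */
-     if (format[0] == '+') return "nested";      /* e.g. "+l", "+s" */
-     return "other";
-   }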
-
-Structure definitions
-=====================
-
-The following free-standing definitions are enough to support the Arrow
-C data interface in your project.  Like the rest of the Arrow project, they
-are available under the Apache License 2.0.
-
-.. code-block:: c
-
-   #define ARROW_FLAG_DICTIONARY_ORDERED 1
-   #define ARROW_FLAG_NULLABLE 2
-   #define ARROW_FLAG_MAP_KEYS_SORTED 4
-
-   struct ArrowSchema {
-     // Array type description
-     const char* format;
-     const char* name;
-     const char* metadata;
-     int64_t flags;
-     int64_t n_children;
-     struct ArrowSchema** children;
-     struct ArrowSchema* dictionary;
-
-     // Release callback
-     void (*release)(struct ArrowSchema*);
-     // Opaque producer-specific data
-     void* private_data;
-   };
-
-   struct ArrowArray {
-     // Array data description
-     int64_t length;
-     int64_t null_count;
-     int64_t offset;
-     int64_t n_buffers;
-     int64_t n_children;
-     const void** buffers;
-     struct ArrowArray** children;
-     struct ArrowArray* dictionary;
-
-     // Release callback
-     void (*release)(struct ArrowArray*);
-     // Opaque producer-specific data
-     void* private_data;
-   };
-
-The ArrowSchema structure
--------------------------
-
-The ``ArrowSchema`` structure describes the type and metadata of an exported
-array or record batch.  It has the following fields:
-
-.. c:member:: const char* ArrowSchema.format
-
-   Mandatory.  A null-terminated, UTF8-encoded string describing
-   the data type.  If the data type is nested, child types are not
-   encoded here but in the :c:member:`ArrowSchema.children` structures.
-
-   Consumers MAY decide not to support all data types, but they
-   should document this limitation.
-
-.. c:member:: const char* ArrowSchema.name
-
-   Optional.  A null-terminated, UTF8-encoded string of the field
-   or array name.  This is mainly used to reconstruct child fields
-   of nested types.
-
-   Producers MAY decide not to provide this information, and consumers
-   MAY decide to ignore it.  If omitted, MAY be NULL or an empty string.
-
-.. c:member:: const char* ArrowSchema.metadata
-
-   Optional.  A binary string describing the type's metadata.
-   If the data type is nested, child types are not encoded here but
-   in the :c:member:`ArrowSchema.children` structures.
-
-   This string is not null-terminated but follows a specific format::
-
-      int32: number of key/value pairs (noted N below)
-      int32: byte length of key 0
-      key 0 (not null-terminated)
-      int32: byte length of value 0
-      value 0 (not null-terminated)
-      ...
-      int32: byte length of key N - 1
-      key N - 1 (not null-terminated)
-      int32: byte length of value N - 1
-      value N - 1 (not null-terminated)
-
-   Integers are stored in native endianness.  For example, the metadata
-   ``[('key1', 'value1')]`` is encoded on a little-endian machine as::
-
-      \x01\x00\x00\x00\x04\x00\x00\x00key1\x06\x00\x00\x00value1
-
-   On a big-endian machine, the same example would be encoded as::
-
-      \x00\x00\x00\x01\x00\x00\x00\x04key1\x00\x00\x00\x06value1
-
-   If omitted, this field MUST be NULL (not an empty string).
-
-   Consumers MAY choose to ignore this information.
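-
-   As a consumer-side parsing sketch in plain C (the helper name is
-   illustrative, not part of the interface), assuming ``metadata`` is
-   non-NULL:
-
-   .. code-block:: c
-
-      #include <stdint.h>
-      #include <stdio.h>
-      #include <string.h>
-
-      /* Walk the metadata string and print each key/value pair. */
-      void print_metadata(const char* metadata) {
-        int32_t n_pairs;
-        memcpy(&n_pairs, metadata, sizeof(int32_t));  /* native endianness */
-        const char* pos = metadata + sizeof(int32_t);
-        for (int32_t i = 0; i < n_pairs; ++i) {
-          int32_t key_len, value_len;
-          memcpy(&key_len, pos, sizeof(int32_t));
-          pos += sizeof(int32_t);
-          printf("key: %.*s, ", (int)key_len, pos);
-          pos += key_len;
-          memcpy(&value_len, pos, sizeof(int32_t));
-          pos += sizeof(int32_t);
-          printf("value: %.*s\n", (int)value_len, pos);
-          pos += value_len;
-        }
-      }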
-
-.. c:member:: int64_t ArrowSchema.flags
-
-   Optional.  A bitfield of flags enriching the type description.
-   Its value is computed by OR'ing together the flag values.
-   The following flags are available:
-
-   * ``ARROW_FLAG_NULLABLE``: whether this field is semantically nullable
-     (regardless of whether it actually has null values).
-   * ``ARROW_FLAG_DICTIONARY_ORDERED``: for dictionary-encoded types,
-     whether the ordering of dictionary indices is semantically meaningful.
-   * ``ARROW_FLAG_MAP_KEYS_SORTED``: for map types, whether the keys within
-     each map value are sorted.
-
-   If omitted, MUST be 0.
-
-   Consumers MAY choose to ignore some or all of the flags.  Even then,
-   they SHOULD keep this value around so as to propagate its information
-   to their own consumers.
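-
-   For example, a producer exporting a semantically nullable map field with
-   sorted keys might set (a sketch using the flag values defined above;
-   ``schema`` is an illustrative pointer to the structure being exported):
-
-   .. code-block:: c
-
-      schema->flags = ARROW_FLAG_NULLABLE | ARROW_FLAG_MAP_KEYS_SORTED;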
-
-.. c:member:: int64_t ArrowSchema.n_children
-
-   Mandatory.  The number of children this type has.
-
-.. c:member:: ArrowSchema** ArrowSchema.children
-
-   Optional.  A C array of pointers to each child type of this type.
-   There must be :c:member:`ArrowSchema.n_children` pointers.
-
-   MAY be NULL only if :c:member:`ArrowSchema.n_children` is 0.
-
-.. c:member:: ArrowSchema* ArrowSchema.dictionary
-
-   Optional.  A pointer to the type of dictionary values.
-
-   MUST be present if the ArrowSchema represents a dictionary-encoded type.
-   MUST be NULL otherwise.
-
-.. c:member:: void (*ArrowSchema.release)(struct ArrowSchema*)
-
-   Mandatory.  A pointer to a producer-provided release callback.
-
-   See below for memory management and release callback semantics.
-
-.. c:member:: void* ArrowSchema.private_data
-
-   Optional.  An opaque pointer to producer-provided private data.
-
-   Consumers MUST NOT process this member.  Its lifetime is handled by the
-   producer, and especially by the release callback.
-
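-As a producer-side sketch (illustrative only; the full release semantics are
-specified in the memory management section referenced above), a release
-callback for a schema with heap-allocated children might look like:
-
-.. code-block:: c
-
-   #include <stdint.h>
-   #include <stdlib.h>
-
-   /* Assumes the ArrowSchema definition above is in scope. */
-   static void release_schema(struct ArrowSchema* schema) {
-     /* Release and free all children first. */
-     for (int64_t i = 0; i < schema->n_children; ++i) {
-       struct ArrowSchema* child = schema->children[i];
-       if (child->release != NULL) {
-         child->release(child);
-       }
-       free(child);
-     }
-     free(schema->children);
-     /* Mark the structure as released. */
-     schema->release = NULL;
-   }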
-
-The ArrowArray structure
-------------------------
-
-The ``ArrowArray`` structure describes the data of an exported array or record
-batch.  For the ``ArrowArray`` structure to be interpreted correctly, the
-array type or record batch schema must already be known.  This is either done
-by convention -- for example a producer API that always produces the same data
-type -- or by passing an ``ArrowSchema`` on the side.
-
-It has the following fields:
-
-.. c:member:: int64_t ArrowArray.length
-
-   Mandatory.  The logical length of the array (i.e. its number of items).
-
-.. c:member:: int64_t ArrowArray.null_count
-
-   Mandatory.  The number of null items in the array.  MAY be -1 if not
-   yet computed.
-
-.. c:member:: int64_t ArrowArray.offset
-
-   Mandatory.  The logical offset inside the array (i.e. the number of items
-   from the physical start of the buffers).  MUST be 0 or positive.  For
-   example, an array with ``offset = 2`` and ``length = 3`` exposes the
-   physical items 2, 3 and 4 of its buffers.
-
-   Producers MAY specify that they will only produce 0-offset arrays to
-   ease implementation of consumer code.
-   Consumers MAY decide not to support non-0-offset arrays, but they
-   should document this limitation.
-
-.. c:member:: int64_t ArrowArray.n_buffers
-
-   Mandatory.  The number of physical buffers backing this array.  The
-   number of buffers is a function of the data type, as described in the
-   :ref:`Columnar format specification <format_columnar>`.
-
-   Buffers of children arrays are not included.
-
-.. c:member:: const void** ArrowArray.buffers
-
-   Mandatory.  A C array of pointers to the start of each physical buffer
-   backing this array.  Each ``void*`` pointer is the physical start of
-   a contiguous buffer.  There must be :c:member:`ArrowArray.n_buffers` pointers.
-
-   The producer MUST ensure that each contiguous buffer is large enough to
-   represent ``length + offset`` values encoded according to the
-   :ref:`Columnar format specification <format_columnar>`.
-
-   It is recommended, but not required, that the memory addresses of the
-   buffers be aligned at least according to the type of primitive data that
-   they contain. Consumers MAY decide not to support unaligned memory.
-
-   The pointer to the null bitmap buffer, if the data type specifies one,
-   MAY be NULL only if :c:member:`ArrowArray.null_count` is 0.
-
-   Buffers of children arrays are not included.
-
-.. c:member:: int64_t ArrowArray.n_children
-
-   Mandatory.  The number of children this array has.  The number of children
-   is a function of the data type, as described in the
-   :ref:`Columnar format specification <format_columnar>`.
... 10532 lines suppressed ...

[arrow-rs] 13/14: Changed references to DF and Ballista in Cargo.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 13af12d93824530f84b8f9a336036394ed9c39c6
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:24:10 2021 +0000

    Changed references to DF and Ballista in Cargo.
---
 rust/Cargo.toml | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/rust/Cargo.toml b/rust/Cargo.toml
index de26f87..de7d36f 100644
--- a/rust/Cargo.toml
+++ b/rust/Cargo.toml
@@ -21,14 +21,11 @@ members = [
         "parquet",
         "parquet_derive",
         "parquet_derive_test",
-        "datafusion",
-        "datafusion-examples",
         "arrow-flight",
         "integration-testing",
-	"benchmarks",
 ]
 
 # this package is excluded because it requires different compilation flags, thereby significantly changing
 # how it is compiled within the workspace, causing the whole workspace to be compiled from scratch
 # this way, this is a stand-alone package that compiles independently of the others.
-exclude = ["arrow-pyarrow-integration-testing", "ballista"]
+exclude = ["arrow-pyarrow-integration-testing"]

[arrow-rs] 10/14: Removed csharp.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit a8a652068cf67f071c47379db65a54c116cb0bbe
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:22:34 2021 +0000

    Removed csharp.
---
 csharp/.editorconfig                               | 169 ----
 csharp/.gitattributes                              |  36 -
 csharp/.gitignore                                  | 267 ------
 csharp/Apache.Arrow.sln                            |  61 --
 csharp/ApacheArrow.snk                             | Bin 596 -> 0 bytes
 csharp/Directory.Build.props                       |  59 --
 csharp/Directory.Build.targets                     |  29 -
 csharp/README.md                                   | 184 -----
 csharp/examples/Examples.sln                       |  31 -
 .../FluentBuilderExample.csproj                    |  12 -
 csharp/examples/FluentBuilderExample/Program.cs    |  61 --
 .../Apache.Arrow.Flight.AspNetCore.csproj          |  15 -
 .../FlightIEndpointRouteBuilderExtensions.cs       |  28 -
 .../FlightIGrpcServerBuilderExtensions.cs          |  30 -
 .../Apache.Arrow.Flight/Apache.Arrow.Flight.csproj |  21 -
 .../src/Apache.Arrow.Flight/Client/FlightClient.cs | 120 ---
 .../Client/FlightClientRecordBatchStreamReader.cs  |  28 -
 .../Client/FlightClientRecordBatchStreamWriter.cs  |  56 --
 .../Client/FlightRecordBatchDuplexStreamingCall.cs |  93 ---
 .../Client/FlightRecordBatchStreamingCall.cs       |  83 --
 csharp/src/Apache.Arrow.Flight/FlightAction.cs     |  75 --
 csharp/src/Apache.Arrow.Flight/FlightActionType.cs |  61 --
 csharp/src/Apache.Arrow.Flight/FlightCriteria.cs   |  70 --
 csharp/src/Apache.Arrow.Flight/FlightDescriptor.cs | 102 ---
 .../Apache.Arrow.Flight/FlightDescriptorType.cs    |  23 -
 csharp/src/Apache.Arrow.Flight/FlightEndpoint.cs   |  73 --
 csharp/src/Apache.Arrow.Flight/FlightInfo.cs       |  78 --
 csharp/src/Apache.Arrow.Flight/FlightLocation.cs   |  59 --
 csharp/src/Apache.Arrow.Flight/FlightPutResult.cs  |  64 --
 .../FlightRecordBatchStreamReader.cs               | 104 ---
 .../FlightRecordBatchStreamWriter.cs               |  77 --
 csharp/src/Apache.Arrow.Flight/FlightResult.cs     |  71 --
 csharp/src/Apache.Arrow.Flight/FlightTicket.cs     |  70 --
 .../Internal/FlightDataStream.cs                   | 109 ---
 .../Internal/FlightMessageSerializer.cs            |  57 --
 .../Internal/RecordBatcReaderImplementation.cs     | 131 ---
 .../Apache.Arrow.Flight/Internal/SchemaWriter.cs   |  55 --
 .../Apache.Arrow.Flight/Internal/StreamReader.cs   |  54 --
 .../Apache.Arrow.Flight/Internal/StreamWriter.cs   |  51 --
 .../Apache.Arrow.Flight/Properties/AssemblyInfo.cs |  18 -
 .../src/Apache.Arrow.Flight/Server/FlightServer.cs |  61 --
 .../Server/FlightServerRecordBatchStreamReader.cs  |  31 -
 .../Server/FlightServerRecordBatchStreamWriter.cs  |  31 -
 .../Server/Internal/FlightServerImplementation.cs  | 100 ---
 csharp/src/Apache.Arrow/Apache.Arrow.csproj        |  42 -
 csharp/src/Apache.Arrow/Arrays/Array.cs            |  91 ---
 csharp/src/Apache.Arrow/Arrays/ArrayData.cs        |  92 ---
 .../Arrays/ArrowArrayBuilderFactory.cs             |  79 --
 .../src/Apache.Arrow/Arrays/ArrowArrayFactory.cs   |  81 --
 .../src/Apache.Arrow/Arrays/ArrowArrayVisitor.cs   |  22 -
 csharp/src/Apache.Arrow/Arrays/BinaryArray.cs      | 358 ---------
 csharp/src/Apache.Arrow/Arrays/BooleanArray.cs     | 194 -----
 csharp/src/Apache.Arrow/Arrays/Date32Array.cs      | 112 ---
 csharp/src/Apache.Arrow/Arrays/Date64Array.cs      | 117 ---
 csharp/src/Apache.Arrow/Arrays/DateArrayBuilder.cs | 209 -----
 csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs  |  95 ---
 csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs  |  96 ---
 .../Apache.Arrow/Arrays/DelegatingArrayBuilder.cs  | 102 ---
 csharp/src/Apache.Arrow/Arrays/DoubleArray.cs      |  45 --
 .../Apache.Arrow/Arrays/FixedSizeBinaryArray.cs    | 196 -----
 csharp/src/Apache.Arrow/Arrays/FloatArray.cs       |  45 --
 csharp/src/Apache.Arrow/Arrays/Int16Array.cs       |  46 --
 csharp/src/Apache.Arrow/Arrays/Int32Array.cs       |  46 --
 csharp/src/Apache.Arrow/Arrays/Int64Array.cs       |  46 --
 csharp/src/Apache.Arrow/Arrays/Int8Array.cs        |  46 --
 csharp/src/Apache.Arrow/Arrays/ListArray.cs        | 200 -----
 csharp/src/Apache.Arrow/Arrays/PrimitiveArray.cs   |  70 --
 .../Apache.Arrow/Arrays/PrimitiveArrayBuilder.cs   | 201 -----
 csharp/src/Apache.Arrow/Arrays/StringArray.cs      |  95 ---
 csharp/src/Apache.Arrow/Arrays/StructArray.cs      |  59 --
 csharp/src/Apache.Arrow/Arrays/TimestampArray.cs   | 149 ----
 csharp/src/Apache.Arrow/Arrays/UInt16Array.cs      |  46 --
 csharp/src/Apache.Arrow/Arrays/UInt32Array.cs      |  46 --
 csharp/src/Apache.Arrow/Arrays/UInt64Array.cs      |  46 --
 csharp/src/Apache.Arrow/Arrays/UInt8Array.cs       |  45 --
 csharp/src/Apache.Arrow/Arrays/UnionArray.cs       |  51 --
 .../src/Apache.Arrow/ArrowBuffer.BitmapBuilder.cs  | 280 -------
 csharp/src/Apache.Arrow/ArrowBuffer.Builder.cs     | 255 ------
 csharp/src/Apache.Arrow/ArrowBuffer.cs             |  76 --
 csharp/src/Apache.Arrow/BitUtility.cs              | 204 -----
 csharp/src/Apache.Arrow/ChunkedArray.cs            |  91 ---
 csharp/src/Apache.Arrow/Column.cs                  |  73 --
 csharp/src/Apache.Arrow/DecimalUtility.cs          | 162 ----
 .../Apache.Arrow/Extensions/ArrayDataExtensions.cs |  45 --
 .../Apache.Arrow/Extensions/ArrayPoolExtensions.cs |  63 --
 .../Apache.Arrow/Extensions/ArrowTypeExtensions.cs |  42 -
 .../Apache.Arrow/Extensions/FlatbufExtensions.cs   |  85 --
 .../src/Apache.Arrow/Extensions/SpanExtensions.cs  |  31 -
 .../Apache.Arrow/Extensions/StreamExtensions.cs    |  70 --
 .../Extensions/StreamExtensions.netcoreapp2.1.cs   |  34 -
 .../Extensions/StreamExtensions.netstandard.cs     | 124 ---
 .../Apache.Arrow/Extensions/TimeSpanExtensions.cs  |  35 -
 .../Extensions/TupleExtensions.netstandard.cs      |  29 -
 csharp/src/Apache.Arrow/Field.Builder.cs           |  93 ---
 csharp/src/Apache.Arrow/Field.cs                   |  65 --
 csharp/src/Apache.Arrow/Flatbuf/Block.cs           |  37 -
 csharp/src/Apache.Arrow/Flatbuf/BodyCompression.cs |  47 --
 csharp/src/Apache.Arrow/Flatbuf/Buffer.cs          |  36 -
 csharp/src/Apache.Arrow/Flatbuf/DictionaryBatch.cs |  54 --
 .../src/Apache.Arrow/Flatbuf/DictionaryEncoding.cs |  57 --
 .../Flatbuf/Enums/BodyCompressionMethod.cs         |  24 -
 .../Apache.Arrow/Flatbuf/Enums/CompressionType.cs  |  15 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/DateUnit.cs  |  15 -
 .../src/Apache.Arrow/Flatbuf/Enums/Endianness.cs   |  17 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/Feature.cs   |  39 -
 .../src/Apache.Arrow/Flatbuf/Enums/IntervalUnit.cs |  15 -
 .../Apache.Arrow/Flatbuf/Enums/MessageHeader.cs    |  26 -
 .../Apache.Arrow/Flatbuf/Enums/MetadataVersion.cs  |  29 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/Precision.cs |  16 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/TimeUnit.cs  |  17 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/Type.cs      |  38 -
 csharp/src/Apache.Arrow/Flatbuf/Enums/UnionMode.cs |  15 -
 csharp/src/Apache.Arrow/Flatbuf/Field.cs           |  83 --
 csharp/src/Apache.Arrow/Flatbuf/FieldNode.cs       |  44 -
 csharp/src/Apache.Arrow/Flatbuf/FixedSizeBinary.cs |  39 -
 csharp/src/Apache.Arrow/Flatbuf/FixedSizeList.cs   |  39 -
 .../Apache.Arrow/Flatbuf/FlatBuffers/ByteBuffer.cs | 891 ---------------------
 .../Flatbuf/FlatBuffers/ByteBufferUtil.cs          |  39 -
 .../Flatbuf/FlatBuffers/FlatBufferBuilder.cs       | 812 -------------------
 .../Flatbuf/FlatBuffers/FlatBufferConstants.cs     |  29 -
 .../Flatbuf/FlatBuffers/IFlatbufferObject.cs       |  28 -
 .../src/Apache.Arrow/Flatbuf/FlatBuffers/Offset.cs |  48 --
 .../src/Apache.Arrow/Flatbuf/FlatBuffers/Struct.cs |  27 -
 .../src/Apache.Arrow/Flatbuf/FlatBuffers/Table.cs  | 195 -----
 csharp/src/Apache.Arrow/Flatbuf/Footer.cs          |  68 --
 csharp/src/Apache.Arrow/Flatbuf/KeyValue.cs        |  57 --
 csharp/src/Apache.Arrow/Flatbuf/Map.cs             |  63 --
 csharp/src/Apache.Arrow/Flatbuf/Message.cs         |  60 --
 csharp/src/Apache.Arrow/Flatbuf/RecordBatch.cs     |  67 --
 csharp/src/Apache.Arrow/Flatbuf/Schema.cs          |  76 --
 csharp/src/Apache.Arrow/Flatbuf/Tensor.cs          |  60 --
 csharp/src/Apache.Arrow/Flatbuf/TensorDim.cs       |  53 --
 csharp/src/Apache.Arrow/Flatbuf/Types/Binary.cs    |  29 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Bool.cs      |  29 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Date.cs      |  44 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Decimal.cs   |  54 --
 csharp/src/Apache.Arrow/Flatbuf/Types/Duration.cs  |  38 -
 .../Apache.Arrow/Flatbuf/Types/FloatingPoint.cs    |  38 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Int.cs       |  42 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Interval.cs  |  38 -
 .../src/Apache.Arrow/Flatbuf/Types/LargeBinary.cs  |  31 -
 csharp/src/Apache.Arrow/Flatbuf/Types/LargeList.cs |  31 -
 csharp/src/Apache.Arrow/Flatbuf/Types/LargeUtf8.cs |  31 -
 csharp/src/Apache.Arrow/Flatbuf/Types/List.cs      |  29 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Null.cs      |  30 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Struct_.cs   |  32 -
 csharp/src/Apache.Arrow/Flatbuf/Types/Time.cs      |  45 --
 csharp/src/Apache.Arrow/Flatbuf/Types/Timestamp.cs |  74 --
 csharp/src/Apache.Arrow/Flatbuf/Types/Union.cs     |  56 --
 csharp/src/Apache.Arrow/Flatbuf/Types/Utf8.cs      |  30 -
 csharp/src/Apache.Arrow/Interfaces/IArrowArray.cs  |  40 -
 .../Apache.Arrow/Interfaces/IArrowArrayBuilder.cs  |  54 --
 .../Apache.Arrow/Interfaces/IArrowArrayVisitor.cs  |  30 -
 csharp/src/Apache.Arrow/Ipc/ArrowFileConstants.cs  |  24 -
 csharp/src/Apache.Arrow/Ipc/ArrowFileReader.cs     |  69 --
 .../Ipc/ArrowFileReaderImplementation.cs           | 308 -------
 csharp/src/Apache.Arrow/Ipc/ArrowFileWriter.cs     | 281 -------
 csharp/src/Apache.Arrow/Ipc/ArrowFooter.cs         |  96 ---
 .../Ipc/ArrowMemoryReaderImplementation.cs         | 118 ---
 .../Apache.Arrow/Ipc/ArrowReaderImplementation.cs  | 290 -------
 csharp/src/Apache.Arrow/Ipc/ArrowStreamReader.cs   |  90 ---
 .../Ipc/ArrowStreamReaderImplementation.cs         | 262 ------
 csharp/src/Apache.Arrow/Ipc/ArrowStreamWriter.cs   | 726 -----------------
 .../Apache.Arrow/Ipc/ArrowTypeFlatbufferBuilder.cs | 249 ------
 csharp/src/Apache.Arrow/Ipc/Block.cs               |  40 -
 csharp/src/Apache.Arrow/Ipc/IArrowReader.cs        |  26 -
 csharp/src/Apache.Arrow/Ipc/IpcOptions.cs          |  37 -
 csharp/src/Apache.Arrow/Ipc/MessageSerializer.cs   | 177 ----
 .../Ipc/ReadOnlyMemoryBufferAllocator.cs           |  39 -
 csharp/src/Apache.Arrow/Memory/MemoryAllocator.cs  |  81 --
 .../Apache.Arrow/Memory/NativeMemoryAllocator.cs   |  51 --
 .../src/Apache.Arrow/Memory/NativeMemoryManager.cs |  83 --
 csharp/src/Apache.Arrow/Memory/NullMemoryOwner.cs  |  29 -
 csharp/src/Apache.Arrow/Properties/AssembyInfo.cs  |  18 -
 .../Apache.Arrow/Properties/Resources.Designer.cs  |  73 --
 csharp/src/Apache.Arrow/Properties/Resources.resx  | 123 ---
 csharp/src/Apache.Arrow/RecordBatch.Builder.cs     | 167 ----
 csharp/src/Apache.Arrow/RecordBatch.cs             |  88 --
 csharp/src/Apache.Arrow/Schema.Builder.cs          |  92 ---
 csharp/src/Apache.Arrow/Schema.cs                  | 125 ---
 csharp/src/Apache.Arrow/Table.cs                   | 113 ---
 csharp/src/Apache.Arrow/Types/ArrowType.cs         |  43 -
 csharp/src/Apache.Arrow/Types/BinaryType.cs        |  28 -
 csharp/src/Apache.Arrow/Types/BooleanType.cs       |  30 -
 csharp/src/Apache.Arrow/Types/Date32Type.cs        |  30 -
 csharp/src/Apache.Arrow/Types/Date64Type.cs        |  30 -
 csharp/src/Apache.Arrow/Types/DateType.cs          |  29 -
 csharp/src/Apache.Arrow/Types/Decimal128Type.cs    |  35 -
 csharp/src/Apache.Arrow/Types/Decimal256Type.cs    |  35 -
 csharp/src/Apache.Arrow/Types/DoubleType.cs        |  31 -
 .../src/Apache.Arrow/Types/FixedSizeBinaryType.cs  |  38 -
 csharp/src/Apache.Arrow/Types/FixedWidthType.cs    |  25 -
 csharp/src/Apache.Arrow/Types/FloatType.cs         |  31 -
 csharp/src/Apache.Arrow/Types/FloatingPointType.cs |  30 -
 csharp/src/Apache.Arrow/Types/HalfFloatType.cs     |  31 -
 csharp/src/Apache.Arrow/Types/IArrowType.cs        |  63 --
 csharp/src/Apache.Arrow/Types/IArrowTypeVisitor.cs |  29 -
 csharp/src/Apache.Arrow/Types/Int16Type.cs         |  29 -
 csharp/src/Apache.Arrow/Types/Int32Type.cs         |  29 -
 csharp/src/Apache.Arrow/Types/Int64Type.cs         |  29 -
 csharp/src/Apache.Arrow/Types/Int8Type.cs          |  30 -
 csharp/src/Apache.Arrow/Types/IntervalUnit.cs      |  40 -
 csharp/src/Apache.Arrow/Types/ListType.cs          |  37 -
 csharp/src/Apache.Arrow/Types/NestedType.cs        |  46 --
 csharp/src/Apache.Arrow/Types/NullType.cs          |  28 -
 csharp/src/Apache.Arrow/Types/NumberType.cs        |  23 -
 csharp/src/Apache.Arrow/Types/StringType.cs        |  28 -
 csharp/src/Apache.Arrow/Types/StructType.cs        |  61 --
 csharp/src/Apache.Arrow/Types/Time32Type.cs        |  32 -
 csharp/src/Apache.Arrow/Types/Time64Type.cs        |  32 -
 csharp/src/Apache.Arrow/Types/TimeType.cs          |  36 -
 csharp/src/Apache.Arrow/Types/TimestampType.cs     |  52 --
 csharp/src/Apache.Arrow/Types/UInt16Type.cs        |  29 -
 csharp/src/Apache.Arrow/Types/UInt32Type.cs        |  29 -
 csharp/src/Apache.Arrow/Types/UInt64Type.cs        |  29 -
 csharp/src/Apache.Arrow/Types/UInt8Type.cs         |  29 -
 csharp/src/Apache.Arrow/Types/UnionType.cs         |  46 --
 csharp/src/Apache.Arrow/Utility.cs                 |  87 --
 .../Apache.Arrow.Benchmarks.csproj                 |  18 -
 .../ArrowReaderBenchmark.cs                        | 160 ----
 .../ArrowWriterBenchmark.cs                        |  58 --
 csharp/test/Apache.Arrow.Benchmarks/Program.cs     |  29 -
 .../Apache.Arrow.Flight.TestWeb.csproj             |  15 -
 .../Extensions/AsyncStreamExtensions.cs            |  39 -
 .../Apache.Arrow.Flight.TestWeb/FlightHolder.cs    |  62 --
 .../Apache.Arrow.Flight.TestWeb/FlightStore.cs     |  27 -
 csharp/test/Apache.Arrow.Flight.TestWeb/Program.cs |  52 --
 .../Properties/launchSettings.json                 |  12 -
 .../RecordBatchWithMetadata.cs                     |  31 -
 csharp/test/Apache.Arrow.Flight.TestWeb/Startup.cs |  61 --
 .../TestFlightServer.cs                            | 116 ---
 .../appsettings.Development.json                   |  10 -
 .../Apache.Arrow.Flight.TestWeb/appsettings.json   |  15 -
 .../Apache.Arrow.Flight.Tests.csproj               |  21 -
 .../FlightInfoComparer.cs                          |  39 -
 .../test/Apache.Arrow.Flight.Tests/FlightTests.cs  | 316 --------
 .../Apache.Arrow.Flight.Tests/TestWebFactory.cs    |  79 --
 .../Apache.Arrow.Tests/Apache.Arrow.Tests.csproj   |  22 -
 .../test/Apache.Arrow.Tests/ArrayBuilderTests.cs   | 198 -----
 .../test/Apache.Arrow.Tests/ArrayTypeComparer.cs   | 121 ---
 csharp/test/Apache.Arrow.Tests/ArrowArrayTests.cs  | 274 -------
 .../ArrowBufferBitmapBuilderTests.cs               | 493 ------------
 .../Apache.Arrow.Tests/ArrowBufferBuilderTests.cs  | 216 -----
 csharp/test/Apache.Arrow.Tests/ArrowBufferTests.cs | 114 ---
 .../Apache.Arrow.Tests/ArrowFileReaderTests.cs     | 161 ----
 .../Apache.Arrow.Tests/ArrowFileWriterTests.cs     | 118 ---
 .../test/Apache.Arrow.Tests/ArrowReaderVerifier.cs | 222 -----
 .../Apache.Arrow.Tests/ArrowStreamReaderTests.cs   | 238 ------
 .../Apache.Arrow.Tests/ArrowStreamWriterTests.cs   | 498 ------------
 .../Apache.Arrow.Tests/BinaryArrayBuilderTests.cs  | 489 -----------
 csharp/test/Apache.Arrow.Tests/BitUtilityTests.cs  | 171 ----
 .../test/Apache.Arrow.Tests/BooleanArrayTests.cs   | 222 -----
 csharp/test/Apache.Arrow.Tests/ColumnTests.cs      |  58 --
 csharp/test/Apache.Arrow.Tests/Date32ArrayTests.cs | 125 ---
 csharp/test/Apache.Arrow.Tests/Date64ArrayTests.cs | 133 ---
 .../Apache.Arrow.Tests/Decimal128ArrayTests.cs     | 241 ------
 .../Apache.Arrow.Tests/Decimal256ArrayTests.cs     | 241 ------
 .../test/Apache.Arrow.Tests/DecimalUtilityTests.cs |  51 --
 .../Extensions/DateTimeOffsetExtensions.cs         |  40 -
 csharp/test/Apache.Arrow.Tests/FieldComparer.cs    |  44 -
 .../Fixtures/DefaultMemoryAllocatorFixture.cs      |  31 -
 .../test/Apache.Arrow.Tests/SchemaBuilderTests.cs  | 156 ----
 csharp/test/Apache.Arrow.Tests/SchemaComparer.cs   |  46 --
 csharp/test/Apache.Arrow.Tests/StructArrayTests.cs | 144 ----
 csharp/test/Apache.Arrow.Tests/TableTests.cs       |  83 --
 csharp/test/Apache.Arrow.Tests/TestData.cs         | 280 -------
 .../test/Apache.Arrow.Tests/TestDateAndTimeData.cs |  83 --
 .../test/Apache.Arrow.Tests/TestMemoryAllocator.cs |  29 -
 csharp/test/Apache.Arrow.Tests/TypeTests.cs        | 131 ---
 csharp/test/Directory.Build.props                  |  26 -
 270 files changed, 23986 deletions(-)

diff --git a/csharp/.editorconfig b/csharp/.editorconfig
deleted file mode 100644
index 01506a0..0000000
--- a/csharp/.editorconfig
+++ /dev/null
@@ -1,169 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-root = true
-
-# Default settings:
-# A newline ending every file
-# Use 4 spaces as indentation
-[*]
-insert_final_newline = true
-indent_style = space
-indent_size = 4
-trim_trailing_whitespace = true
-
-# C# files
-[*.cs]
-# New line preferences
-csharp_new_line_before_open_brace = all
-csharp_new_line_before_else = true
-csharp_new_line_before_catch = true
-csharp_new_line_before_finally = true
-csharp_new_line_before_members_in_object_initializers = true
-csharp_new_line_before_members_in_anonymous_types = true
-csharp_new_line_between_query_expression_clauses = true
-
-# Indentation preferences
-csharp_indent_block_contents = true
-csharp_indent_braces = false
-csharp_indent_case_contents = true
-csharp_indent_case_contents_when_block = true
-csharp_indent_switch_labels = true
-csharp_indent_labels = one_less_than_current
-
-# Modifier preferences
-csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:suggestion
-
-# avoid this. qualification unless absolutely necessary
-dotnet_style_qualification_for_field = false:suggestion
-dotnet_style_qualification_for_property = false:suggestion
-dotnet_style_qualification_for_method = false:suggestion
-dotnet_style_qualification_for_event = false:suggestion
-
-# Types: use keywords instead of BCL types, and permit var only when the type is clear
-csharp_style_var_for_built_in_types = false:suggestion
-csharp_style_var_when_type_is_apparent = false:none
-csharp_style_var_elsewhere = false:suggestion
-dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
-dotnet_style_predefined_type_for_member_access = true:suggestion
-
-# name all constant fields using PascalCase
-dotnet_naming_rule.constant_fields_should_be_pascal_case.severity = suggestion
-dotnet_naming_rule.constant_fields_should_be_pascal_case.symbols  = constant_fields
-dotnet_naming_rule.constant_fields_should_be_pascal_case.style    = pascal_case_style
-dotnet_naming_symbols.constant_fields.applicable_kinds   = field
-dotnet_naming_symbols.constant_fields.required_modifiers = const
-dotnet_naming_style.pascal_case_style.capitalization = pascal_case
-
-# static fields should have s_ prefix
-dotnet_naming_rule.static_fields_should_have_prefix.severity = suggestion
-dotnet_naming_rule.static_fields_should_have_prefix.symbols  = static_fields
-dotnet_naming_rule.static_fields_should_have_prefix.style    = static_prefix_style
-dotnet_naming_symbols.static_fields.applicable_kinds   = field
-dotnet_naming_symbols.static_fields.required_modifiers = static
-dotnet_naming_symbols.static_fields.applicable_accessibilities = private, internal, private_protected
-dotnet_naming_style.static_prefix_style.required_prefix = s_
-dotnet_naming_style.static_prefix_style.capitalization = camel_case 
-
-# internal and private fields should be _camelCase
-dotnet_naming_rule.camel_case_for_private_internal_fields.severity = suggestion
-dotnet_naming_rule.camel_case_for_private_internal_fields.symbols  = private_internal_fields
-dotnet_naming_rule.camel_case_for_private_internal_fields.style    = camel_case_underscore_style
-dotnet_naming_symbols.private_internal_fields.applicable_kinds = field
-dotnet_naming_symbols.private_internal_fields.applicable_accessibilities = private, internal
-dotnet_naming_style.camel_case_underscore_style.required_prefix = _
-dotnet_naming_style.camel_case_underscore_style.capitalization = camel_case 
-
-# Code style defaults
-csharp_using_directive_placement = outside_namespace:suggestion
-dotnet_sort_system_directives_first = true
-csharp_prefer_braces = true:refactoring
-csharp_preserve_single_line_blocks = true:none
-csharp_preserve_single_line_statements = false:none
-csharp_prefer_static_local_function = true:suggestion
-csharp_prefer_simple_using_statement = false:none
-csharp_style_prefer_switch_expression = true:suggestion
-
-# Code quality
-dotnet_style_readonly_field = true:suggestion
-dotnet_code_quality_unused_parameters = non_public:suggestion
-
-# Expression-level preferences
-dotnet_style_object_initializer = true:suggestion
-dotnet_style_collection_initializer = true:suggestion
-dotnet_style_explicit_tuple_names = true:suggestion
-dotnet_style_coalesce_expression = true:suggestion
-dotnet_style_null_propagation = true:suggestion
-dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion
-dotnet_style_prefer_inferred_tuple_names = true:suggestion
-dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion
-dotnet_style_prefer_auto_properties = true:suggestion
-dotnet_style_prefer_conditional_expression_over_assignment = true:refactoring
-dotnet_style_prefer_conditional_expression_over_return = true:refactoring
-csharp_prefer_simple_default_expression = true:suggestion
-
-# Expression-bodied members
-csharp_style_expression_bodied_methods = true:refactoring
-csharp_style_expression_bodied_constructors = true:refactoring
-csharp_style_expression_bodied_operators = true:refactoring
-csharp_style_expression_bodied_properties = true:refactoring
-csharp_style_expression_bodied_indexers = true:refactoring
-csharp_style_expression_bodied_accessors = true:refactoring
-csharp_style_expression_bodied_lambdas = true:refactoring
-csharp_style_expression_bodied_local_functions = true:refactoring
-
-# Pattern matching
-csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion
-csharp_style_pattern_matching_over_as_with_null_check = true:suggestion
-csharp_style_inlined_variable_declaration = true:suggestion
-
-# Null checking preferences
-csharp_style_throw_expression = true:suggestion
-csharp_style_conditional_delegate_call = true:suggestion
-
-# Other features
-csharp_style_prefer_index_operator = false:none
-csharp_style_prefer_range_operator = false:none
-csharp_style_pattern_local_over_anonymous_function = false:none
-
-# Space preferences
-csharp_space_after_cast = false
-csharp_space_after_colon_in_inheritance_clause = true
-csharp_space_after_comma = true
-csharp_space_after_dot = false
-csharp_space_after_keywords_in_control_flow_statements = true
-csharp_space_after_semicolon_in_for_statement = true
-csharp_space_around_binary_operators = before_and_after
-csharp_space_around_declaration_statements = do_not_ignore
-csharp_space_before_colon_in_inheritance_clause = true
-csharp_space_before_comma = false
-csharp_space_before_dot = false
-csharp_space_before_open_square_brackets = false
-csharp_space_before_semicolon_in_for_statement = false
-csharp_space_between_empty_square_brackets = false
-csharp_space_between_method_call_empty_parameter_list_parentheses = false
-csharp_space_between_method_call_name_and_opening_parenthesis = false
-csharp_space_between_method_call_parameter_list_parentheses = false
-csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
-csharp_space_between_method_declaration_name_and_open_parenthesis = false
-csharp_space_between_method_declaration_parameter_list_parentheses = false
-csharp_space_between_parentheses = false
-csharp_space_between_square_brackets = false
-
-# Xml project files
-[*.{csproj,props,targets}]
-indent_size = 2
-charset = utf-8
diff --git a/csharp/.gitattributes b/csharp/.gitattributes
deleted file mode 100644
index d2ff52b..0000000
--- a/csharp/.gitattributes
+++ /dev/null
@@ -1,36 +0,0 @@
-###############################################################################
-# Set default behavior to automatically normalize line endings.
-###############################################################################
-* text=auto
-
-###############################################################################
-# Set default behavior for command prompt diff.
-#
-# This is needed for earlier builds of msysgit that do not have it on by
-# default for csharp files.
-# Note: This is only used by the command line.
-###############################################################################
-#*.cs     diff=csharp
-
-###############################################################################
-# Set the merge driver for project and solution files
-#
-# Merging from the command prompt will add diff markers to the files if there
-# are conflicts (merging from VS is not affected by the settings below; in VS
-# the diff markers are never inserted). Diff markers may cause the following
-# file extensions to fail to load in VS. An alternative is to treat these
-# files as binary, so they always conflict and require user intervention on
-# every merge. To do so, just uncomment the entries below.
-###############################################################################
-#*.sln       merge=binary
-#*.csproj    merge=binary
-#*.vbproj    merge=binary
-#*.vcxproj   merge=binary
-#*.vcproj    merge=binary
-#*.dbproj    merge=binary
-#*.fsproj    merge=binary
-#*.lsproj    merge=binary
-#*.wixproj   merge=binary
-#*.modelproj merge=binary
-#*.sqlproj   merge=binary
-#*.wwaproj   merge=binary
\ No newline at end of file
diff --git a/csharp/.gitignore b/csharp/.gitignore
deleted file mode 100644
index a9fbd58..0000000
--- a/csharp/.gitignore
+++ /dev/null
@@ -1,267 +0,0 @@
-## Ignore Visual Studio temporary files, build results, and
-## files generated by popular Visual Studio add-ons.
-
-# User-specific files
-*.suo
-*.user
-*.userosscache
-*.sln.docstates
-
-# User-specific files (MonoDevelop/Xamarin Studio)
-*.userprefs
-
-# Build results
-[Dd]ebug/
-[Dd]ebugPublic/
-[Rr]elease/
-[Rr]eleases/
-x64/
-x86/
-bld/
-[Bb]in/
-[Oo]bj/
-[Ll]og/
-
-# Visual Studio 2015 cache/options directory
-.vs/
-# Uncomment if you have tasks that create the project's static files in wwwroot
-#wwwroot/
-
-# MSTest test Results
-[Tt]est[Rr]esult*/
-[Bb]uild[Ll]og.*
-
-# NUNIT
-*.VisualState.xml
-TestResult.xml
-
-# Build Results of an ATL Project
-[Dd]ebugPS/
-[Rr]eleasePS/
-dlldata.c
-
-# DNX
-project.lock.json
-project.fragment.lock.json
-artifacts/
-
-*_i.c
-*_p.c
-*_i.h
-*.ilk
-*.meta
-*.obj
-*.pch
-*.pdb
-*.pgc
-*.pgd
-*.rsp
-*.sbr
-*.tlb
-*.tli
-*.tlh
-*.tmp
-*.tmp_proj
-*.log
-*.vspscc
-*.vssscc
-.builds
-*.pidb
-*.svclog
-*.scc
-
-# Chutzpah Test files
-_Chutzpah*
-
-# Visual C++ cache files
-ipch/
-*.aps
-*.ncb
-*.opendb
-*.opensdf
-*.sdf
-*.cachefile
-*.VC.db
-*.VC.VC.opendb
-
-# Visual Studio profiler
-*.psess
-*.vsp
-*.vspx
-*.sap
-
-# TFS 2012 Local Workspace
-$tf/
-
-# Guidance Automation Toolkit
-*.gpState
-
-# ReSharper is a .NET coding add-in
-_ReSharper*/
-*.[Rr]e[Ss]harper
-*.DotSettings.user
-
-# JustCode is a .NET coding add-in
-.JustCode
-
-# TeamCity is a build add-in
-_TeamCity*
-
-# DotCover is a Code Coverage Tool
-*.dotCover
-
-# NCrunch
-_NCrunch_*
-.*crunch*.local.xml
-nCrunchTemp_*
-
-# MightyMoose
-*.mm.*
-AutoTest.Net/
-
-# Web workbench (sass)
-.sass-cache/
-
-# Installshield output folder
-[Ee]xpress/
-
-# DocProject is a documentation generator add-in
-DocProject/buildhelp/
-DocProject/Help/*.HxT
-DocProject/Help/*.HxC
-DocProject/Help/*.hhc
-DocProject/Help/*.hhk
-DocProject/Help/*.hhp
-DocProject/Help/Html2
-DocProject/Help/html
-
-# Click-Once directory
-publish/
-
-# Publish Web Output
-*.[Pp]ublish.xml
-*.azurePubxml
-# TODO: Comment the next line if you want to check in your web deploy settings,
-# but database connection strings (with potential passwords) will be unencrypted
-#*.pubxml
-*.publishproj
-
-# Microsoft Azure Web App publish settings. Comment the next line if you want to
-# check in your Azure Web App publish settings, but sensitive information contained
-# in these scripts will be unencrypted
-PublishScripts/
-
-# NuGet Packages
-*.nupkg
-# The packages folder can be ignored because of Package Restore
-**/packages/*
-# except build/, which is used as an MSBuild target.
-!**/packages/build/
-# Uncomment if necessary; however, it will generally be regenerated when needed
-#!**/packages/repositories.config
-# NuGet v3's project.json files produce more ignorable files
-*.nuget.props
-*.nuget.targets
-
-# Microsoft Azure Build Output
-csx/
-*.build.csdef
-
-# Microsoft Azure Emulator
-ecf/
-rcf/
-
-# Windows Store app package directories and files
-AppPackages/
-BundleArtifacts/
-Package.StoreAssociation.xml
-_pkginfo.txt
-
-# Visual Studio cache files
-# files ending in .cache can be ignored
-*.[Cc]ache
-# but keep track of directories ending in .cache
-!*.[Cc]ache/
-
-# Others
-ClientBin/
-~$*
-*~
-*.dbmdl
-*.dbproj.schemaview
-*.jfm
-*.pfx
-*.publishsettings
-node_modules/
-orleans.codegen.cs
-
-# Since there are multiple workflows, uncomment the next line to ignore bower_components
-# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
-#bower_components/
-
-# RIA/Silverlight projects
-Generated_Code/
-
-# Backup & report files from converting an old project file
-# to a newer Visual Studio version. Backup files are not needed,
-# because we have git ;-)
-_UpgradeReport_Files/
-Backup*/
-UpgradeLog*.XML
-UpgradeLog*.htm
-
-# SQL Server files
-*.mdf
-*.ldf
-
-# Business Intelligence projects
-*.rdl.data
-*.bim.layout
-*.bim_*.settings
-
-# Microsoft Fakes
-FakesAssemblies/
-
-# GhostDoc plugin setting file
-*.GhostDoc.xml
-
-# Node.js Tools for Visual Studio
-.ntvs_analysis.dat
-
-# Visual Studio 6 build log
-*.plg
-
-# Visual Studio 6 workspace options file
-*.opt
-
-# Visual Studio LightSwitch build output
-**/*.HTMLClient/GeneratedArtifacts
-**/*.DesktopClient/GeneratedArtifacts
-**/*.DesktopClient/ModelManifest.xml
-**/*.Server/GeneratedArtifacts
-**/*.Server/ModelManifest.xml
-_Pvt_Extensions
-
-# Paket dependency manager
-.paket/paket.exe
-paket-files/
-
-# FAKE - F# Make
-.fake/
-
-# JetBrains Rider
-.idea/
-*.sln.iml
-
-# CodeRush
-.cr/
-
-# Python Tools for Visual Studio (PTVS)
-__pycache__/
-*.pyc
-
-# Project-specific
-artifacts/
-
-# add .sln files back because they are ignored by the root .gitignore file
-!*.sln
diff --git a/csharp/Apache.Arrow.sln b/csharp/Apache.Arrow.sln
deleted file mode 100644
index 8498c8a..0000000
--- a/csharp/Apache.Arrow.sln
+++ /dev/null
@@ -1,61 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Version 16
-VisualStudioVersion = 16.0.29926.136
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow", "src\Apache.Arrow\Apache.Arrow.csproj", "{BA6B2B0D-EAAE-4183-8A39-1B9CF571F71F}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow.Tests", "test\Apache.Arrow.Tests\Apache.Arrow.Tests.csproj", "{9CCEC01B-E67A-4726-BE72-7B514F76163F}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow.Benchmarks", "test\Apache.Arrow.Benchmarks\Apache.Arrow.Benchmarks.csproj", "{742DF47D-77C5-4B84-9E0C-69645F1161EA}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow.Flight.Tests", "test\Apache.Arrow.Flight.Tests\Apache.Arrow.Flight.Tests.csproj", "{D6443535-3740-4F6C-8001-F90EDAF4CF0C}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow.Flight.TestWeb", "test\Apache.Arrow.Flight.TestWeb\Apache.Arrow.Flight.TestWeb.csproj", "{058F9CFA-2A13-43B8-87D9-E69F63F9EFF0}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow.Flight", "src\Apache.Arrow.Flight\Apache.Arrow.Flight.csproj", "{2490AA1E-DDA4-4069-B065-79A4897B0582}"
-EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Apache.Arrow.Flight.AspNetCore", "src\Apache.Arrow.Flight.AspNetCore\Apache.Arrow.Flight.AspNetCore.csproj", "{E4F74938-E8FF-4AC1-A495-FEE95FC1EFDF}"
-EndProject
-Global
-	GlobalSection(SolutionConfigurationPlatforms) = preSolution
-		Debug|Any CPU = Debug|Any CPU
-		Release|Any CPU = Release|Any CPU
-	EndGlobalSection
-	GlobalSection(ProjectConfigurationPlatforms) = postSolution
-		{BA6B2B0D-EAAE-4183-8A39-1B9CF571F71F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{BA6B2B0D-EAAE-4183-8A39-1B9CF571F71F}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{BA6B2B0D-EAAE-4183-8A39-1B9CF571F71F}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{BA6B2B0D-EAAE-4183-8A39-1B9CF571F71F}.Release|Any CPU.Build.0 = Release|Any CPU
-		{9CCEC01B-E67A-4726-BE72-7B514F76163F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{9CCEC01B-E67A-4726-BE72-7B514F76163F}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{9CCEC01B-E67A-4726-BE72-7B514F76163F}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{9CCEC01B-E67A-4726-BE72-7B514F76163F}.Release|Any CPU.Build.0 = Release|Any CPU
-		{742DF47D-77C5-4B84-9E0C-69645F1161EA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{742DF47D-77C5-4B84-9E0C-69645F1161EA}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{742DF47D-77C5-4B84-9E0C-69645F1161EA}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{742DF47D-77C5-4B84-9E0C-69645F1161EA}.Release|Any CPU.Build.0 = Release|Any CPU
-		{D6443535-3740-4F6C-8001-F90EDAF4CF0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{D6443535-3740-4F6C-8001-F90EDAF4CF0C}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{D6443535-3740-4F6C-8001-F90EDAF4CF0C}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{D6443535-3740-4F6C-8001-F90EDAF4CF0C}.Release|Any CPU.Build.0 = Release|Any CPU
-		{058F9CFA-2A13-43B8-87D9-E69F63F9EFF0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{058F9CFA-2A13-43B8-87D9-E69F63F9EFF0}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{058F9CFA-2A13-43B8-87D9-E69F63F9EFF0}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{058F9CFA-2A13-43B8-87D9-E69F63F9EFF0}.Release|Any CPU.Build.0 = Release|Any CPU
-		{2490AA1E-DDA4-4069-B065-79A4897B0582}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{2490AA1E-DDA4-4069-B065-79A4897B0582}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{2490AA1E-DDA4-4069-B065-79A4897B0582}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{2490AA1E-DDA4-4069-B065-79A4897B0582}.Release|Any CPU.Build.0 = Release|Any CPU
-		{E4F74938-E8FF-4AC1-A495-FEE95FC1EFDF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{E4F74938-E8FF-4AC1-A495-FEE95FC1EFDF}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{E4F74938-E8FF-4AC1-A495-FEE95FC1EFDF}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{E4F74938-E8FF-4AC1-A495-FEE95FC1EFDF}.Release|Any CPU.Build.0 = Release|Any CPU
-	EndGlobalSection
-	GlobalSection(SolutionProperties) = preSolution
-		HideSolutionNode = FALSE
-	EndGlobalSection
-	GlobalSection(ExtensibilityGlobals) = postSolution
-		SolutionGuid = {FD0BB617-6031-4844-B99D-B331E335B572}
-	EndGlobalSection
-EndGlobal
diff --git a/csharp/ApacheArrow.snk b/csharp/ApacheArrow.snk
deleted file mode 100644
index 68df439..0000000
Binary files a/csharp/ApacheArrow.snk and /dev/null differ
diff --git a/csharp/Directory.Build.props b/csharp/Directory.Build.props
deleted file mode 100644
index 3ee2af7..0000000
--- a/csharp/Directory.Build.props
+++ /dev/null
@@ -1,59 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-
-<Project>
-
-  <!-- Common repo directories -->
-  <PropertyGroup>
-    <RepoRoot>$(MSBuildThisFileDirectory)../</RepoRoot>
-    <CSharpDir>$(MSBuildThisFileDirectory)</CSharpDir>
-    <BaseOutputPath>$(CSharpDir)/artifacts/$(MSBuildProjectName)</BaseOutputPath>
-  </PropertyGroup>
-
-  <!-- AssemblyInfo properties -->
-  <PropertyGroup>
-    <Product>Apache Arrow library</Product>
-    <Copyright>Copyright 2016-2019 The Apache Software Foundation</Copyright>
-    <Company>The Apache Software Foundation</Company>
-    <Version>4.0.0-SNAPSHOT</Version>
-  </PropertyGroup>
-
-  <PropertyGroup>
-    <EmbedUntrackedSources>true</EmbedUntrackedSources>
-    <LangVersion>8.0</LangVersion>
-    <SignAssembly>true</SignAssembly>
-    <AssemblyOriginatorKeyFile>$(CSharpDir)ApacheArrow.snk</AssemblyOriginatorKeyFile>
-  </PropertyGroup>
-
-  <!-- NuGet properties -->
-  <PropertyGroup>
-    <Authors>The Apache Software Foundation</Authors>
-    <PackageIconUrl>https://www.apache.org/images/feather.png</PackageIconUrl>
-    <PackageLicenseFile>LICENSE.txt</PackageLicenseFile>
-    <PackageProjectUrl>https://arrow.apache.org/</PackageProjectUrl>
-    <PackageTags>apache arrow</PackageTags>
-    <RepositoryType>git</RepositoryType>
-    <RepositoryUrl>https://github.com/apache/arrow</RepositoryUrl>
-    <IncludeSymbols>true</IncludeSymbols>
-    <SymbolPackageFormat>snupkg</SymbolPackageFormat>
-  </PropertyGroup>
-
-  <ItemGroup Condition="'$(IsPackable)' == 'true'">
-    <Content Include="$(RepoRoot)LICENSE.txt" Pack="true" PackagePath="" />
-  </ItemGroup>
-
-</Project>
diff --git a/csharp/Directory.Build.targets b/csharp/Directory.Build.targets
deleted file mode 100644
index 498c752..0000000
--- a/csharp/Directory.Build.targets
+++ /dev/null
@@ -1,29 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-
-<Project>
-
-  <!-- The following works around https://github.com/dotnet/sourcelink/issues/572  -->
-  <PropertyGroup>
-    <TargetFrameworkMonikerAssemblyAttributesPath>$([System.IO.Path]::Combine('$(IntermediateOutputPath)','$(TargetFrameworkMoniker).AssemblyAttributes$(DefaultLanguageSourceExtension)'))</TargetFrameworkMonikerAssemblyAttributesPath>
-  </PropertyGroup>
-  <ItemGroup>
-    <EmbeddedFiles Include="$(GeneratedAssemblyInfoFile)"/>
-    <EmbeddedFiles Include="$(TargetFrameworkMonikerAssemblyAttributesPath)"/>
-  </ItemGroup>
-  
-</Project>
diff --git a/csharp/README.md b/csharp/README.md
deleted file mode 100644
index 2a60cd2..0000000
--- a/csharp/README.md
+++ /dev/null
@@ -1,184 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Apache Arrow
-
-An implementation of Arrow targeting .NET Standard.
-
-This implementation is under development and may not be suitable for use in production environments.
-
-# Implementation
-
-- Arrow 0.11 (specification)
-- C# 7.2
-- .NET Standard 1.3
-- Asynchronous I/O
-- Uses modern .NET runtime features such as **Span&lt;T&gt;**, **Memory&lt;T&gt;**, **MemoryManager&lt;T&gt;**, and **System.Buffers** primitives for memory allocation, memory storage, and fast serialization.
-- Uses **Acyclic Visitor Pattern** for array types and arrays to facilitate serialization, record batch traversal, and format growth.
-
-# Known Issues
-
-- Cannot read Arrow files containing dictionary batches, tensors, or tables.
-- Cannot easily modify the allocation strategy without implementing a custom memory pool. All allocations are currently 64-byte aligned and padded to 8 bytes.
-- The default memory allocator over-allocates with pointer fixing, which results in significant memory overhead for small buffers. A buffer that requires a single byte of storage may be backed by an allocation of up to 64 bytes to satisfy alignment requirements.
-- There are currently few builder APIs available for specific array types. Arrays must be built manually with the Arrow buffer builder abstraction.
-- FlatBuffer code generation is not included in the build process.
-- The serialization implementation does not perform exhaustive validation checks during deserialization in every scenario.
-- Throws exceptions with vague, inconsistent, or non-localized messages in many situations.
-- Throws exceptions that are not specific to the Arrow implementation in circumstances where it probably should (e.g., it does not throw ArrowException).
-- Lack of code documentation
-- Lack of usage examples
-- Lack of comprehensive unit tests
-- Lack of comprehensive benchmarks
-
-# Usage
-
-    using System.Diagnostics;
-    using System.IO;
-    using System.Threading.Tasks;
-    using Apache.Arrow;
-    using Apache.Arrow.Ipc;
-
-    public static async Task<RecordBatch> ReadArrowAsync(string filename)
-    {
-        using (var stream = File.OpenRead(filename))
-        using (var reader = new ArrowFileReader(stream))
-        {
-            var recordBatch = await reader.ReadNextRecordBatchAsync();
-            Debug.WriteLine("Read record batch with {0} column(s)", recordBatch.ColumnCount);
-            return recordBatch;
-        }
-    }
-
-
-# Status
-
-## Memory Management
-
-- Allocations are 64-byte aligned and padded to 8 bytes.
-- Allocations are automatically garbage collected.
-
-## Arrays
-
-### Primitive Types
-
-- Int8, Int16, Int32, Int64
-- UInt8, UInt16, UInt32, UInt64
-- Float, Double
-- Binary (variable-length)
-- String (utf-8)
-- Null
-
-### Parametric Types
-
-- Timestamp
-- Date32
-- Date64
-- Decimal
-- Time32
-- Time64
-- Binary (fixed-length)
-- List
-- Struct
-
-### Type Metadata
-
-- Data Types
-- Fields
-- Schema
-
-### Serialization
-
-- File
-- Stream
-
-## Not Implemented
-
-- Serialization
-    - Exhaustive validation
-    - Dictionary Batch
-        - Cannot serialize or deserialize files or streams containing dictionary batches
-    - Dictionary Encoding
-    - Schema Metadata
-    - Schema Field Metadata
-- Types
-    - Tensor
-    - Table
-- Arrays
-    - Union
-        - Dense
-        - Sparse
-    - Half-Float
-    - Dictionary
-- Array Operations
-    - Equality / Comparison
-    - Casting
-    - Builders
-- Compute
-    - There is currently no API available for a compute / kernel abstraction.
-
-# Build
-
-Install the latest `.NET Core SDK` from https://dotnet.microsoft.com/download.
-
-    dotnet build
-
-## NuGet Build
-
-To build the NuGet package, run the following command; it builds a debug-flavor preview package into the **artifacts** folder.
-
-    dotnet pack
-
-When building the officially released version, run the following (see the note below about the `git` repository):
-
-    dotnet pack -c Release
-
-This builds the final/stable package.
-
-NOTE: When building the officially released version, ensure that your `git` repository has the `origin` remote set to `https://github.com/apache/arrow.git`, which will ensure Source Link is set correctly. See https://github.com/dotnet/sourcelink/blob/master/docs/README.md for more information.
-
-There are two output artifacts:
-1. `Apache.Arrow.<version>.nupkg` - this contains the executable assemblies
-2. `Apache.Arrow.<version>.snupkg` - this contains the debug symbol files
-
-Both of these artifacts can then be uploaded to https://www.nuget.org/packages/manage/upload.
-
-## Docker Build
-
-Build from the Apache Arrow project root.
-
-    docker build -f csharp/build/docker/Dockerfile .
-
-## Testing
-
-	dotnet test
-
-All build artifacts are placed in the **artifacts** folder in the project root.
-
-# Coding Style
-
-This project follows the coding style specified in [Coding Style](https://github.com/dotnet/runtime/blob/master/docs/coding-guidelines/coding-style.md).
-
-# Updating FlatBuffers code
-
-See https://google.github.io/flatbuffers/flatbuffers_guide_use_java_c-sharp.html for how to get the `flatc` executable.
-
-Run `flatc --csharp` on each `.fbs` file in the [format](../format) folder, then replace the checked-in `.cs` files under [FlatBuf](src/Apache.Arrow/Flatbuf) with the generated files.
-
-Update the non-generated [FlatBuffers](src/Apache.Arrow/Flatbuf/FlatBuffers) `.cs` files with the files from the [google/flatbuffers repo](https://github.com/google/flatbuffers/tree/master/net/FlatBuffers).
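For context on the reader API the removed README demonstrates, here is a minimal sketch of the equivalent read path for the Arrow streaming format, assuming the Apache.Arrow 4.x API in which `ArrowStreamReader` mirrors `ArrowFileReader`:

    using System;
    using System.IO;
    using System.Threading.Tasks;
    using Apache.Arrow;
    using Apache.Arrow.Ipc;

    public static class StreamReadExample
    {
        public static async Task ReadAllBatchesAsync(Stream input)
        {
            using (var reader = new ArrowStreamReader(input))
            {
                RecordBatch batch;
                // ReadNextRecordBatchAsync returns null once the stream is exhausted.
                while ((batch = await reader.ReadNextRecordBatchAsync()) != null)
                {
                    Console.WriteLine("Read batch with {0} row(s), {1} column(s)",
                        batch.Length, batch.ColumnCount);
                }
            }
        }
    }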
diff --git a/csharp/examples/Examples.sln b/csharp/examples/Examples.sln
deleted file mode 100644
index c0a4199..0000000
--- a/csharp/examples/Examples.sln
+++ /dev/null
@@ -1,31 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 15
-VisualStudioVersion = 15.0.27703.2042
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FluentBuilderExample", "FluentBuilderExample\FluentBuilderExample.csproj", "{ECE22119-D91D-44F7-9575-85B98F946289}"
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Apache.Arrow", "..\src\Apache.Arrow\Apache.Arrow.csproj", "{1FE1DE95-FF6E-4895-82E7-909713C53524}"
-EndProject
-Global
-	GlobalSection(SolutionConfigurationPlatforms) = preSolution
-		Debug|Any CPU = Debug|Any CPU
-		Release|Any CPU = Release|Any CPU
-	EndGlobalSection
-	GlobalSection(ProjectConfigurationPlatforms) = postSolution
-		{ECE22119-D91D-44F7-9575-85B98F946289}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{ECE22119-D91D-44F7-9575-85B98F946289}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{ECE22119-D91D-44F7-9575-85B98F946289}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{ECE22119-D91D-44F7-9575-85B98F946289}.Release|Any CPU.Build.0 = Release|Any CPU
-		{1FE1DE95-FF6E-4895-82E7-909713C53524}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{1FE1DE95-FF6E-4895-82E7-909713C53524}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{1FE1DE95-FF6E-4895-82E7-909713C53524}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{1FE1DE95-FF6E-4895-82E7-909713C53524}.Release|Any CPU.Build.0 = Release|Any CPU
-	EndGlobalSection
-	GlobalSection(SolutionProperties) = preSolution
-		HideSolutionNode = FALSE
-	EndGlobalSection
-	GlobalSection(ExtensibilityGlobals) = postSolution
-		SolutionGuid = {C22A81AD-8B64-4D7C-97AC-49E9F118AE78}
-	EndGlobalSection
-EndGlobal
diff --git a/csharp/examples/FluentBuilderExample/FluentBuilderExample.csproj b/csharp/examples/FluentBuilderExample/FluentBuilderExample.csproj
deleted file mode 100644
index 575a274..0000000
--- a/csharp/examples/FluentBuilderExample/FluentBuilderExample.csproj
+++ /dev/null
@@ -1,12 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  
-  <PropertyGroup>
-    <OutputType>Exe</OutputType>
-    <TargetFramework>netcoreapp2.1</TargetFramework>
-  </PropertyGroup>
-
-  <ItemGroup>
-    <ProjectReference Include="..\..\src\Apache.Arrow\Apache.Arrow.csproj" />
-  </ItemGroup>
-
-</Project>
\ No newline at end of file
diff --git a/csharp/examples/FluentBuilderExample/Program.cs b/csharp/examples/FluentBuilderExample/Program.cs
deleted file mode 100644
index a55f841..0000000
--- a/csharp/examples/FluentBuilderExample/Program.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow;
-using Apache.Arrow.Ipc;
-using Apache.Arrow.Memory;
-using System;
-using System.IO;
-using System.Linq;
-using System.Threading.Tasks;
-
-namespace FluentBuilderExample
-{
-    public class Program
-    {
-        public static async Task Main(string[] args)
-        {
-            // Use a specific memory pool from which arrays will be allocated (optional)
-
-            var memoryAllocator = new NativeMemoryAllocator(alignment: 64);
-
-            // Build a record batch using the Fluent API
-
-            var recordBatch = new RecordBatch.Builder(memoryAllocator)
-                .Append("Column A", false, col => col.Int32(array => array.AppendRange(Enumerable.Range(0, 10))))
-                .Append("Column B", false, col => col.Float(array => array.AppendRange(Enumerable.Range(0, 10).Select(x => Convert.ToSingle(x * 2)))))
-                .Append("Column C", false, col => col.String(array => array.AppendRange(Enumerable.Range(0, 10).Select(x => $"Item {x+1}"))))
-                .Append("Column D", false, col => col.Boolean(array => array.AppendRange(Enumerable.Range(0, 10).Select(x => x % 2 == 0))))
-                .Build();
-
-            // Print memory allocation statistics
-
-            Console.WriteLine("Allocations: {0}", memoryAllocator.Statistics.Allocations);
-            Console.WriteLine("Allocated: {0} byte(s)", memoryAllocator.Statistics.BytesAllocated);
-
-            // Write record batch to a file
-
-            using (var stream = File.OpenWrite("test.arrow"))
-            using (var writer = new ArrowFileWriter(stream, recordBatch.Schema))
-            {
-                await writer.WriteRecordBatchAsync(recordBatch);
-                await writer.WriteFooterAsync();
-            }
-
-            Console.WriteLine("Done");
-            Console.ReadKey();
-        }
-    }
-}
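The fluent `RecordBatch.Builder` used in the example above composes per-type array builders. Here is a sketch of driving those builders directly, assuming the Apache.Arrow 4.x `Int32Array.Builder`, `StringArray.Builder`, and `Schema.Builder` types with parameterless `Build()` overloads:

    using Apache.Arrow;
    using Apache.Arrow.Types;

    public static class DirectBuilderExample
    {
        public static RecordBatch Build()
        {
            // Build each column with its typed builder.
            var ids = new Int32Array.Builder().AppendRange(new[] { 1, 2, 3 }).Build();
            var names = new StringArray.Builder().Append("a").Append("b").Append("c").Build();

            // Describe the columns with a schema.
            var schema = new Schema.Builder()
                .Field(f => f.Name("id").DataType(Int32Type.Default).Nullable(false))
                .Field(f => f.Name("name").DataType(StringType.Default).Nullable(false))
                .Build();

            return new RecordBatch(schema, new IArrowArray[] { ids, names }, length: 3);
        }
    }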
diff --git a/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj b/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj
deleted file mode 100644
index 7cfa33c..0000000
--- a/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj
+++ /dev/null
@@ -1,15 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-
-  <PropertyGroup>
-    <TargetFramework>netcoreapp3.1</TargetFramework>
-  </PropertyGroup>
-
-  <ItemGroup>
-    <PackageReference Include="Grpc.AspNetCore.Server" Version="2.33.1" />
-  </ItemGroup>
-
-  <ItemGroup>
-    <ProjectReference Include="..\Apache.Arrow.Flight\Apache.Arrow.Flight.csproj" />
-  </ItemGroup>
-
-</Project>
diff --git a/csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIEndpointRouteBuilderExtensions.cs b/csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIEndpointRouteBuilderExtensions.cs
deleted file mode 100644
index 5902d7b..0000000
--- a/csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIEndpointRouteBuilderExtensions.cs
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Flight.Server.Internal;
-using Microsoft.AspNetCore.Routing;
-
-namespace Microsoft.AspNetCore.Builder
-{
-    public static class FlightIEndpointRouteBuilderExtensions
-    {
-        public static GrpcServiceEndpointConventionBuilder MapFlightEndpoint(this IEndpointRouteBuilder endpointRouteBuilder)
-        {
-            return endpointRouteBuilder.MapGrpcService<FlightServerImplementation>();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIGrpcServerBuilderExtensions.cs b/csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIGrpcServerBuilderExtensions.cs
deleted file mode 100644
index 692e86f..0000000
--- a/csharp/src/Apache.Arrow.Flight.AspNetCore/Extensions/FlightIGrpcServerBuilderExtensions.cs
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Flight.Server;
-using Grpc.AspNetCore.Server;
-
-namespace Microsoft.Extensions.DependencyInjection
-{
-    public static class FlightIGrpcServerBuilderExtensions
-    {
-        public static IGrpcServerBuilder AddFlightServer<T>(this IGrpcServerBuilder grpcServerBuilder)
-            where T : FlightServer
-        {
-            grpcServerBuilder.Services.AddScoped<FlightServer, T>();
-            return grpcServerBuilder;
-        }
-    }
-}
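A sketch of how the two extension methods above were typically wired together in an ASP.NET Core host; `InMemoryFlightServer` is a hypothetical `FlightServer` subclass standing in for an application's implementation:

    using Microsoft.AspNetCore.Builder;
    using Microsoft.Extensions.DependencyInjection;

    public class Startup
    {
        public void ConfigureServices(IServiceCollection services)
        {
            // AddFlightServer registers the FlightServer implementation as a scoped service.
            services.AddGrpc().AddFlightServer<InMemoryFlightServer>();
        }

        public void Configure(IApplicationBuilder app)
        {
            app.UseRouting();
            app.UseEndpoints(endpoints =>
            {
                // MapFlightEndpoint maps the gRPC Flight service onto the endpoint routes.
                endpoints.MapFlightEndpoint();
            });
        }
    }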
diff --git a/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj b/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj
deleted file mode 100644
index bd59268..0000000
--- a/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj
+++ /dev/null
@@ -1,21 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-
-  <PropertyGroup>
-    <TargetFramework>netstandard2.1</TargetFramework>
-  </PropertyGroup>
-  
-  <ItemGroup>
-    <PackageReference Include="Google.Protobuf" Version="3.14.0" />
-    <PackageReference Include="Grpc.Net.Client" Version="2.33.1" />
-    <PackageReference Include="Grpc.Tools" Version="2.33.1" PrivateAssets="All" />
-  </ItemGroup>
-
-  <ItemGroup>
-    <ProjectReference Include="..\Apache.Arrow\Apache.Arrow.csproj" />
-  </ItemGroup>
-
-  <ItemGroup>
-    <Protobuf Include="..\..\..\format\Flight.proto" Access="internal" />
-  </ItemGroup>
-
-</Project>
diff --git a/csharp/src/Apache.Arrow.Flight/Client/FlightClient.cs b/csharp/src/Apache.Arrow.Flight/Client/FlightClient.cs
deleted file mode 100644
index 8140e06..0000000
--- a/csharp/src/Apache.Arrow.Flight/Client/FlightClient.cs
+++ /dev/null
@@ -1,120 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Threading.Tasks;
-using Apache.Arrow.Flight.Internal;
-using Apache.Arrow.Flight.Protocol;
-using Grpc.Core;
-using Grpc.Net.Client;
-
-namespace Apache.Arrow.Flight.Client
-{
-    public class FlightClient
-    {
-        internal static readonly Empty EmptyInstance = new Empty();
-
-        private readonly FlightService.FlightServiceClient _client;
-
-        public FlightClient(GrpcChannel grpcChannel)
-        {
-            _client = new FlightService.FlightServiceClient(grpcChannel);
-        }
-
-        public AsyncServerStreamingCall<FlightInfo> ListFlights(FlightCriteria criteria = null, Metadata headers = null)
-        {
-            if (criteria == null)
-            {
-                criteria = FlightCriteria.Empty;
-            }
-            
-            var response = _client.ListFlights(criteria.ToProtocol(), headers);
-            var convertStream = new StreamReader<Protocol.FlightInfo, FlightInfo>(response.ResponseStream, inFlight => new FlightInfo(inFlight));
-
-            return new AsyncServerStreamingCall<FlightInfo>(convertStream, response.ResponseHeadersAsync, response.GetStatus, response.GetTrailers, response.Dispose);
-        }
-
-        public AsyncServerStreamingCall<FlightActionType> ListActions(Metadata headers = null)
-        {
-            var response = _client.ListActions(EmptyInstance, headers);
-            var convertStream = new StreamReader<Protocol.ActionType, FlightActionType>(response.ResponseStream, actionType => new FlightActionType(actionType));
-
-            return new AsyncServerStreamingCall<FlightActionType>(convertStream, response.ResponseHeadersAsync, response.GetStatus, response.GetTrailers, response.Dispose);
-        }
-
-        public FlightRecordBatchStreamingCall GetStream(FlightTicket ticket, Metadata headers = null)
-        {
-            var stream = _client.DoGet(ticket.ToProtocol(), headers);
-            var responseStream = new FlightClientRecordBatchStreamReader(stream.ResponseStream);
-            return new FlightRecordBatchStreamingCall(responseStream, stream.ResponseHeadersAsync, stream.GetStatus, stream.GetTrailers, stream.Dispose);
-        }
-
-        public AsyncUnaryCall<FlightInfo> GetInfo(FlightDescriptor flightDescriptor, Metadata headers = null)
-        {
-            var flightInfoResult = _client.GetFlightInfoAsync(flightDescriptor.ToProtocol(), headers);
-
-            var flightInfo = flightInfoResult
-                .ResponseAsync
-                .ContinueWith(async flightInfo => new FlightInfo(await flightInfo.ConfigureAwait(false)))
-                .Unwrap();
-
-            return new AsyncUnaryCall<FlightInfo>(
-                flightInfo,
-                flightInfoResult.ResponseHeadersAsync,
-                flightInfoResult.GetStatus,
-                flightInfoResult.GetTrailers,
-                flightInfoResult.Dispose);
-        }
-
-        public FlightRecordBatchDuplexStreamingCall StartPut(FlightDescriptor flightDescriptor, Metadata headers = null)
-        {
-            var channels = _client.DoPut(headers);
-            var requestStream = new FlightClientRecordBatchStreamWriter(channels.RequestStream, flightDescriptor);
-            var readStream = new StreamReader<Protocol.PutResult, FlightPutResult>(channels.ResponseStream, putResult => new FlightPutResult(putResult));
-            return new FlightRecordBatchDuplexStreamingCall(
-                requestStream,
-                readStream,
-                channels.ResponseHeadersAsync,
-                channels.GetStatus,
-                channels.GetTrailers,
-                channels.Dispose);
-        }
-
-        public AsyncServerStreamingCall<FlightResult> DoAction(FlightAction action, Metadata headers = null)
-        {
-            var stream = _client.DoAction(action.ToProtocol(), headers);
-            var streamReader = new StreamReader<Protocol.Result, FlightResult>(stream.ResponseStream, result => new FlightResult(result));
-            return new AsyncServerStreamingCall<FlightResult>(streamReader, stream.ResponseHeadersAsync, stream.GetStatus, stream.GetTrailers, stream.Dispose);
-        }
-
-        public AsyncUnaryCall<Schema> GetSchema(FlightDescriptor flightDescriptor, Metadata headers = null)
-        {
-            var schemaResult = _client.GetSchemaAsync(flightDescriptor.ToProtocol(), headers);
-
-            var schema = schemaResult
-                .ResponseAsync
-                .ContinueWith(async schema => FlightMessageSerializer.DecodeSchema((await schema.ConfigureAwait(false)).Schema.Memory))
-                .Unwrap();
-
-            return new AsyncUnaryCall<Schema>(
-                schema,
-                schemaResult.ResponseHeadersAsync,
-                schemaResult.GetStatus,
-                schemaResult.GetTrailers,
-                schemaResult.Dispose);
-        }
-    }
-}
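A minimal consumption sketch for the `FlightClient` removed above, assuming a plaintext Flight server at the hypothetical address `http://localhost:5000`, that `FlightDescriptor` exposes a `CreatePathDescriptor` factory, and that the record-batch reader implements Grpc.Core's `IAsyncStreamReader<RecordBatch>`:

    using System;
    using System.Threading.Tasks;
    using Apache.Arrow.Flight;
    using Apache.Arrow.Flight.Client;
    using Grpc.Net.Client;

    public static class FlightClientExample
    {
        public static async Task DumpFlightAsync()
        {
            using var channel = GrpcChannel.ForAddress("http://localhost:5000");
            var client = new FlightClient(channel);

            // Resolve the flight, then pull record batches from each endpoint's ticket.
            var info = await client.GetInfo(FlightDescriptor.CreatePathDescriptor("dataset"));
            foreach (var endpoint in info.Endpoints)
            {
                using var call = client.GetStream(endpoint.Ticket);
                while (await call.ResponseStream.MoveNext(default))
                {
                    Console.WriteLine("Received batch with {0} row(s)", call.ResponseStream.Current.Length);
                }
            }
        }
    }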
diff --git a/csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamReader.cs b/csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamReader.cs
deleted file mode 100644
index 011af0c..0000000
--- a/csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamReader.cs
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Flight.Internal;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Client
-{
-    public class FlightClientRecordBatchStreamReader : FlightRecordBatchStreamReader
-    {
-        internal FlightClientRecordBatchStreamReader(IAsyncStreamReader<FlightData> flightDataStream) : base(flightDataStream)
-        {
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamWriter.cs b/csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamWriter.cs
deleted file mode 100644
index d2e62c4..0000000
--- a/csharp/src/Apache.Arrow.Flight/Client/FlightClientRecordBatchStreamWriter.cs
+++ /dev/null
@@ -1,56 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading.Tasks;
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Flight.Internal;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Client
-{
-    public class FlightClientRecordBatchStreamWriter : FlightRecordBatchStreamWriter, IClientStreamWriter<RecordBatch>
-    {
-        private readonly IClientStreamWriter<FlightData> _clientStreamWriter;
-        private bool _completed = false;
-        internal FlightClientRecordBatchStreamWriter(IClientStreamWriter<FlightData> clientStreamWriter, FlightDescriptor flightDescriptor) : base(clientStreamWriter, flightDescriptor)
-        {
-            _clientStreamWriter = clientStreamWriter;
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (!_completed)
-            {
-                throw new InvalidOperationException("Dispose called before completing the stream.");
-            }
-
-            base.Dispose(disposing);
-        }
-
-        public async Task CompleteAsync()
-        {
-            if (_completed)
-            {
-                return;
-            }
-
-            await _clientStreamWriter.CompleteAsync().ConfigureAwait(false);
-            _completed = true;
-        }
-    }
-}
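Paired with `StartPut` on the client, the writer above gives the DoPut flow. A sketch under the same assumptions as the client example, where `client` and `recordBatch` are pre-existing values:

    using System.Threading.Tasks;
    using Apache.Arrow;
    using Apache.Arrow.Flight;
    using Apache.Arrow.Flight.Client;

    public static class DoPutExample
    {
        public static async Task UploadAsync(FlightClient client, RecordBatch recordBatch)
        {
            var call = client.StartPut(FlightDescriptor.CreatePathDescriptor("uploads"));

            // Write batches, then complete the request stream; note that the writer's
            // Dispose throws if CompleteAsync was never called.
            await call.RequestStream.WriteAsync(recordBatch);
            await call.RequestStream.CompleteAsync();

            // Drain the server's PutResult acknowledgements before disposing the call.
            while (await call.ResponseStream.MoveNext(default))
            {
            }

            call.Dispose();
        }
    }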
diff --git a/csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchDuplexStreamingCall.cs b/csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchDuplexStreamingCall.cs
deleted file mode 100644
index c9e6ecd..0000000
--- a/csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchDuplexStreamingCall.cs
+++ /dev/null
@@ -1,93 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Threading.Tasks;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Client
-{
-    public class FlightRecordBatchDuplexStreamingCall : IDisposable
-    {
-        private readonly Func<Status> _getStatusFunc;
-        private readonly Func<Metadata> _getTrailersFunc;
-        private readonly Action _disposeAction;
-
-        internal FlightRecordBatchDuplexStreamingCall(
-            FlightClientRecordBatchStreamWriter requestStream,
-            IAsyncStreamReader<FlightPutResult> responseStream,
-            Task<Metadata> responseHeadersAsync,
-            Func<Status> getStatusFunc,
-            Func<Metadata> getTrailersFunc,
-            Action disposeAction)
-        {
-            RequestStream = requestStream;
-            ResponseStream = responseStream;
-            ResponseHeadersAsync = responseHeadersAsync;
-            _getStatusFunc = getStatusFunc;
-            _getTrailersFunc = getTrailersFunc;
-            _disposeAction = disposeAction;
-        }
-
-        /// <summary>
-        ///  Async stream to read streaming responses.
-        /// </summary>
-        public IAsyncStreamReader<FlightPutResult> ResponseStream { get; }
-
-        /// <summary>
-        /// Async stream to send streaming requests.
-        /// </summary>
-        public FlightClientRecordBatchStreamWriter RequestStream { get; }
-
-        /// <summary>
-        /// Asynchronous access to response headers.
-        /// </summary>
-        public Task<Metadata> ResponseHeadersAsync { get; }
-
-        /// <summary>
-        /// Provides means to clean up after the call. If the call has already finished normally
-        /// (response stream has been fully read), doesn't do anything. Otherwise, requests
-        /// cancellation of the call which should terminate all pending async operations
-        /// associated with the call. As a result, all resources being used by the call should
-        /// be released eventually.
-        /// </summary>
-        /// <remarks>
-        /// Normally, there is no need for you to dispose the call unless you want to utilize
-        /// the "Cancel" semantics of invoking Dispose.
-        /// </remarks>
-        public void Dispose()
-        {
-            _disposeAction();
-        }
-
-        /// <summary>
-        /// Gets the call status if the call has already finished. Throws InvalidOperationException otherwise.
-        /// </summary>
-        /// <returns></returns>
-        public Status GetStatus()
-        {
-            return _getStatusFunc();
-        }
-
-        /// <summary>
-        /// Gets the call trailing metadata if the call has already finished. Throws InvalidOperationException otherwise.
-        /// </summary>
-        /// <returns></returns>
-        public Metadata GetTrailers()
-        {
-            return _getTrailersFunc();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchStreamingCall.cs b/csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchStreamingCall.cs
deleted file mode 100644
index 246cfa7..0000000
--- a/csharp/src/Apache.Arrow.Flight/Client/FlightRecordBatchStreamingCall.cs
+++ /dev/null
@@ -1,83 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Threading.Tasks;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Client
-{
-    public class FlightRecordBatchStreamingCall : IDisposable
-    {
-        private readonly Func<Status> _getStatusFunc;
-        private readonly Func<Metadata> _getTrailersFunc;
-        private readonly Action _disposeAction;
-
-        internal FlightRecordBatchStreamingCall(
-            FlightClientRecordBatchStreamReader recordBatchStreamReader,
-            Task<Metadata> responseHeadersAsync,
-            Func<Status> getStatusFunc,
-            Func<Metadata> getTrailersFunc,
-            Action disposeAction)
-        {
-            ResponseStream = recordBatchStreamReader;
-            ResponseHeadersAsync = responseHeadersAsync;
-            _getStatusFunc = getStatusFunc;
-            _getTrailersFunc = getTrailersFunc;
-            _disposeAction = disposeAction;
-        }
-
-        public FlightClientRecordBatchStreamReader ResponseStream { get; }
-
-        /// <summary>
-        /// Asynchronous access to response headers.
-        /// </summary>
-        public Task<Metadata> ResponseHeadersAsync { get; }
-
-        /// <summary>
-        /// Gets the call status if the call has already finished. Throws InvalidOperationException otherwise.
-        /// </summary>
-        /// <returns></returns>
-        public Status GetStatus()
-        {
-            return _getStatusFunc();
-        }
-
-        /// <summary>
-        /// Gets the call trailing metadata if the call has already finished. Throws InvalidOperationException otherwise.
-        /// </summary>
-        /// <returns></returns>
-        public Metadata GetTrailers()
-        {
-            return _getTrailersFunc();
-        }
-
-        /// <summary>
-        /// Provides means to clean up after the call. If the call has already finished normally
-        /// (response stream has been fully read), doesn't do anything. Otherwise, requests
-        /// cancellation of the call which should terminate all pending async operations
-        /// associated with the call. As a result, all resources being used by the call should
-        /// be released eventually.
-        /// </summary>
-        /// <remarks>
-        /// Normally, there is no need for you to dispose the call unless you want to utilize
-        /// the "Cancel" semantics of invoking Dispose.
-        /// </remarks>
-        public void Dispose()
-        {
-            _disposeAction();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightAction.cs b/csharp/src/Apache.Arrow.Flight/FlightAction.cs
deleted file mode 100644
index 4a82fa6..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightAction.cs
+++ /dev/null
@@ -1,75 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightAction
-    {
-        private readonly Protocol.Action _action;
-        internal FlightAction(Protocol.Action action)
-        {
-            _action = action;
-        }
-
-        public FlightAction(string type, ByteString body)
-        {
-            _action = new Protocol.Action()
-            {
-                Body = body,
-                Type = type
-            };
-        }
-
-        public FlightAction(string type, string body)
-        {
-            _action = new Protocol.Action()
-            {
-                Body = ByteString.CopyFromUtf8(body),
-                Type = type
-            };
-        }
-
-        public FlightAction(string type, byte[] body)
-        {
-            _action = new Protocol.Action()
-            {
-                Body = ByteString.CopyFrom(body),
-                Type = type
-            };
-        }
-
-        public FlightAction(string type)
-        {
-            _action = new Protocol.Action()
-            {
-                Type = type
-            };
-        }
-
-        public string Type => _action.Type;
-
-        public ByteString Body => _action.Body;
-
-        internal Protocol.Action ToProtocol()
-        {
-            return _action;
-        }
-    }
-}
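
FlightAction wrapped the Protocol.Action message and could be constructed with a string, byte[], or ByteString body, or with no body at all; a minimal sketch using the constructors shown above:

    var ping      = new FlightAction("ping");                              // type only
    var fromText  = new FlightAction("drop-table", "my_table");            // UTF-8 body
    var fromBytes = new FlightAction("upload", new byte[] { 0x01, 0x02 }); // raw body
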
diff --git a/csharp/src/Apache.Arrow.Flight/FlightActionType.cs b/csharp/src/Apache.Arrow.Flight/FlightActionType.cs
deleted file mode 100644
index 8df8939..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightActionType.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightActionType
-    {
-        private readonly Protocol.ActionType _actionType;
-        internal FlightActionType(Protocol.ActionType actionType)
-        {
-            _actionType = actionType;
-        }
-
-        public FlightActionType(string type, string description)
-        {
-            _actionType = new Protocol.ActionType()
-            {
-                Description = description,
-                Type = type
-            };
-        }
-
-        public string Type => _actionType.Type;
-        public string Description => _actionType.Description;
-
-        internal Protocol.ActionType ToProtocol()
-        {
-            return _actionType;
-        }
-
-        public override bool Equals(object obj)
-        {
-            if(obj is FlightActionType other)
-            {
-                return Equals(_actionType, other._actionType);
-            }
-            return false;
-        }
-
-        public override int GetHashCode()
-        {
-            return _actionType.GetHashCode();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightCriteria.cs b/csharp/src/Apache.Arrow.Flight/FlightCriteria.cs
deleted file mode 100644
index 6bcb087..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightCriteria.cs
+++ /dev/null
@@ -1,70 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightCriteria
-    {
-        internal static readonly FlightCriteria Empty = new FlightCriteria();
-
-        private readonly Protocol.Criteria _criteria;
-
-        internal FlightCriteria(Protocol.Criteria criteria)
-        {
-            _criteria = criteria;
-        }
-
-        public FlightCriteria()
-        {
-            _criteria = new Protocol.Criteria();
-        }
-
-        public FlightCriteria(string expression)
-        {
-            _criteria = new Protocol.Criteria()
-            {
-                Expression = ByteString.CopyFromUtf8(expression)
-            };
-        }
-
-        public FlightCriteria(byte[] bytes)
-        {
-            _criteria = new Protocol.Criteria()
-            {
-                Expression = ByteString.CopyFrom(bytes)
-            };
-        }
-
-        public FlightCriteria(ByteString byteString)
-        {
-            _criteria = new Protocol.Criteria()
-            {
-                Expression = byteString
-            };
-        }
-
-        public ByteString Expression => _criteria.Expression;
-
-        internal Protocol.Criteria ToProtocol()
-        {
-            return _criteria;
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightDescriptor.cs b/csharp/src/Apache.Arrow.Flight/FlightDescriptor.cs
deleted file mode 100644
index 7d44332..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightDescriptor.cs
+++ /dev/null
@@ -1,102 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightDescriptor
-    {
-        private readonly Protocol.FlightDescriptor _flightDescriptor;
-
-        private FlightDescriptor(ByteString command)
-        {
-            _flightDescriptor = new Protocol.FlightDescriptor()
-            {
-                Cmd = command,
-                Type = Protocol.FlightDescriptor.Types.DescriptorType.Cmd
-            };
-        }
-
-        private FlightDescriptor(params string[] paths)
-        {
-            _flightDescriptor = new Protocol.FlightDescriptor()
-            {
-                Type = Protocol.FlightDescriptor.Types.DescriptorType.Path
-            };
-
-            foreach(var path in paths)
-            {
-                _flightDescriptor.Path.Add(path);
-            }
-        }
-
-
-        public static FlightDescriptor CreateCommandDescriptor(byte[] command)
-        {
-            return new FlightDescriptor(ByteString.CopyFrom(command));
-        }
-
-        public static FlightDescriptor CreateCommandDescriptor(string command)
-        {
-            return new FlightDescriptor(ByteString.CopyFromUtf8(command));
-        }
-
-        public static FlightDescriptor CreatePathDescriptor(params string[] paths)
-        {
-            return new FlightDescriptor(paths);
-        }
-
-
-        internal FlightDescriptor(Protocol.FlightDescriptor flightDescriptor)
-        {
-            if(flightDescriptor.Type != Protocol.FlightDescriptor.Types.DescriptorType.Cmd && flightDescriptor.Type != Protocol.FlightDescriptor.Types.DescriptorType.Path)
-            {
-                throw new NotSupportedException();
-            }
-            _flightDescriptor = flightDescriptor;
-        }
-
-        internal Protocol.FlightDescriptor ToProtocol()
-        {
-            return _flightDescriptor;
-        }
-
-        public FlightDescriptorType Type => (FlightDescriptorType)_flightDescriptor.Type;
-
-        public IEnumerable<string> Paths => _flightDescriptor.Path;
-
-        public ByteString Command => _flightDescriptor.Cmd;
-
-
-        public override int GetHashCode()
-        {
-            return _flightDescriptor.GetHashCode();
-        }
-
-        public override bool Equals(object obj)
-        {
-            if(obj is FlightDescriptor other)
-            {
-                return Equals(_flightDescriptor, other._flightDescriptor);
-            }
-            return false;
-        }
-    }
-}
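
FlightDescriptor was created through its static factory methods rather than public constructors, keeping the command/path distinction explicit; a minimal sketch:

    // Command descriptors carry an opaque payload, here a SQL string.
    FlightDescriptor byCommand = FlightDescriptor.CreateCommandDescriptor("SELECT * FROM t");

    // Path descriptors name a dataset by path segments.
    FlightDescriptor byPath = FlightDescriptor.CreatePathDescriptor("warehouse", "t");

    // Type reports which of Command/Paths is meaningful.
    bool isCommand = byCommand.Type == FlightDescriptorType.Command;
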
diff --git a/csharp/src/Apache.Arrow.Flight/FlightDescriptorType.cs b/csharp/src/Apache.Arrow.Flight/FlightDescriptorType.cs
deleted file mode 100644
index 120ed22..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightDescriptorType.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-namespace Apache.Arrow.Flight
-{
-    public enum FlightDescriptorType
-    {
-        Path = 1,
-        Command = 2
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightEndpoint.cs b/csharp/src/Apache.Arrow.Flight/FlightEndpoint.cs
deleted file mode 100644
index ab15fed..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightEndpoint.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightEndpoint
-    {
-        private readonly FlightTicket _ticket;
-        private readonly IReadOnlyList<FlightLocation> _locations;
-        internal FlightEndpoint(Protocol.FlightEndpoint flightEndpoint)
-        {
-            _ticket = new FlightTicket(flightEndpoint.Ticket);
-            _locations = flightEndpoint.Location.Select(x => new FlightLocation(x)).ToList();
-        }
-
-        public FlightEndpoint(FlightTicket ticket, IReadOnlyList<FlightLocation> locations)
-        {
-            _ticket = ticket;
-            _locations = locations;
-        }
-
-        public FlightTicket Ticket => _ticket;
-
-        public IEnumerable<FlightLocation> Locations => _locations;
-
-        internal Protocol.FlightEndpoint ToProtocol()
-        {
-            var output = new Protocol.FlightEndpoint()
-            {
-                Ticket = _ticket.ToProtocol()
-            };
-
-            foreach(var location in _locations)
-            {
-                output.Location.Add(location.ToProtocol());
-            }
-            return output;
-        }
-
-        public override bool Equals(object obj)
-        {
-            if(obj is FlightEndpoint other)
-            {
-                return Equals(_ticket, other._ticket) &&
-                    Enumerable.SequenceEqual(_locations, other._locations);
-            }
-            return false;
-        }
-
-        public override int GetHashCode()
-        {
-            //Ticket should contain enough to get a good hash code
-            return _ticket.GetHashCode();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightInfo.cs b/csharp/src/Apache.Arrow.Flight/FlightInfo.cs
deleted file mode 100644
index 44a7965..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightInfo.cs
+++ /dev/null
@@ -1,78 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Apache.Arrow.Flight.Internal;
-using Apache.Arrow.Ipc;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightInfo
-    {
-        internal FlightInfo(Protocol.FlightInfo flightInfo)
-        {
-            Schema = FlightMessageSerializer.DecodeSchema(flightInfo.Schema.Memory);
-            Descriptor = new FlightDescriptor(flightInfo.FlightDescriptor);
-
-            var endpoints = new List<FlightEndpoint>();
-            foreach(var endpoint in flightInfo.Endpoint)
-            {
-                endpoints.Add(new FlightEndpoint(endpoint));
-            }
-            Endpoints = endpoints;
-
-            TotalBytes = flightInfo.TotalBytes;
-            TotalRecords = flightInfo.TotalRecords;
-        }
-
-        public FlightInfo(Schema schema, FlightDescriptor descriptor, IReadOnlyList<FlightEndpoint> endpoints, long totalRecords = 0, long totalBytes = 0)
-        {
-            Schema = schema;
-            Descriptor = descriptor;
-            Endpoints = endpoints;
-            TotalBytes = totalBytes;
-            TotalRecords = totalRecords;
-        }
-
-        public FlightDescriptor Descriptor { get; }
-
-        public Schema Schema { get; }
-
-        public long TotalBytes { get; }
-
-        public long TotalRecords { get; }
-
-        public IReadOnlyList<FlightEndpoint> Endpoints { get; }
-
-        internal Protocol.FlightInfo ToProtocol()
-        {
-            var serializedSchema = SchemaWriter.SerializeSchema(Schema);
-            var response = new Protocol.FlightInfo()
-            {
-                Schema = serializedSchema,
-                FlightDescriptor = Descriptor.ToProtocol()
-            };
-
-            foreach(var endpoint in Endpoints)
-            {
-                response.Endpoint.Add(endpoint.ToProtocol());
-            }
-
-            return response;
-        }
-    }
-}
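
FlightInfo tied a schema and descriptor to the endpoints a consumer could fetch data from. A GetFlightInfo handler would assemble one roughly as below; the schema variable and the ticket/location values are illustrative.

    var endpoint = new FlightEndpoint(
        new FlightTicket("my-ticket"),
        new List<FlightLocation> { new FlightLocation("grpc://localhost:5000") });

    var info = new FlightInfo(
        schema,                                              // an Apache.Arrow.Schema
        FlightDescriptor.CreatePathDescriptor("my-dataset"),
        new List<FlightEndpoint> { endpoint },
        totalRecords: 100,
        totalBytes: 4096);
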
diff --git a/csharp/src/Apache.Arrow.Flight/FlightLocation.cs b/csharp/src/Apache.Arrow.Flight/FlightLocation.cs
deleted file mode 100644
index 25b9d5d..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightLocation.cs
+++ /dev/null
@@ -1,59 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightLocation
-    {
-        private readonly Protocol.Location _location;
-        internal FlightLocation(Protocol.Location location)
-        {
-            _location = location;
-        }
-
-        public FlightLocation(string uri)
-        {
-            _location = new Protocol.Location()
-            {
-                Uri = uri
-            };
-        }
-
-        public string Uri => _location.Uri;
-
-        internal Protocol.Location ToProtocol()
-        {
-            return _location;
-        }
-
-        public override bool Equals(object obj)
-        {
-            if(obj is FlightLocation other)
-            {
-                return Equals(_location, other._location);
-            }
-            return false;
-        }
-
-        public override int GetHashCode()
-        {
-            return _location.GetHashCode();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightPutResult.cs b/csharp/src/Apache.Arrow.Flight/FlightPutResult.cs
deleted file mode 100644
index 16f278a..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightPutResult.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightPutResult
-    {
-        public static readonly FlightPutResult Empty = new FlightPutResult();
-
-        private readonly Protocol.PutResult _putResult;
-
-        public FlightPutResult()
-        {
-            _putResult = new Protocol.PutResult();
-        }
-
-        public FlightPutResult(ByteString applicationMetadata)
-        {
-            _putResult = new Protocol.PutResult()
-            {
-                AppMetadata = applicationMetadata
-            };
-        }
-
-        public FlightPutResult(byte[] applicationMetadata)
-            : this(ByteString.CopyFrom(applicationMetadata))
-        {
-        }
-
-        public FlightPutResult(string applicationMetadata)
-            : this(ByteString.CopyFromUtf8(applicationMetadata))
-        {
-        }
-
-        internal FlightPutResult(Protocol.PutResult putResult)
-        {
-            _putResult = putResult;
-        }
-
-        public ByteString ApplicationMetadata => _putResult.AppMetadata;
-
-        internal Protocol.PutResult ToProtocol()
-        {
-            return _putResult;
-        }
-    }
-}
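
On the server side of DoPut, each received batch could be acknowledged with a FlightPutResult, optionally carrying application metadata back to the client; a sketch, where responseStream is the IAsyncStreamWriter<FlightPutResult> passed to DoPut:

    // Acknowledge with application metadata...
    await responseStream.WriteAsync(new FlightPutResult("ack"));

    // ...or with the shared empty result when none is needed.
    await responseStream.WriteAsync(FlightPutResult.Empty);
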
diff --git a/csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamReader.cs b/csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamReader.cs
deleted file mode 100644
index 5881275..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamReader.cs
+++ /dev/null
@@ -1,104 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Apache.Arrow.Flatbuf;
-using Apache.Arrow.Flight.Internal;
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Ipc;
-using Google.Protobuf;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight
-{
-    /// <summary>
-    /// Stream of record batches
-    ///
-    /// Use MoveNext() and Current to iterate over the batches.
-    /// There are also gRPC helper functions such as ToListAsync() etc.
-    /// </summary>
-    public abstract class FlightRecordBatchStreamReader : IAsyncStreamReader<RecordBatch>, IAsyncEnumerable<RecordBatch>, IDisposable
-    {
-        //Temporary until .NET 5.0 upgrade
-        private static ValueTask CompletedValueTask = new ValueTask();
-
-        private readonly RecordBatcReaderImplementation _arrowReaderImplementation;
-
-        private protected FlightRecordBatchStreamReader(IAsyncStreamReader<Protocol.FlightData> flightDataStream)
-        {
-            _arrowReaderImplementation = new RecordBatcReaderImplementation(flightDataStream);
-        }
-
-        public ValueTask<Schema> Schema => _arrowReaderImplementation.ReadSchema();
-
-        internal ValueTask<FlightDescriptor> GetFlightDescriptor()
-        {
-            return _arrowReaderImplementation.ReadFlightDescriptor();
-        }
-
-        /// <summary>
-        /// Get the application metadata from the most recently received record batch
-        /// </summary>
-        public IReadOnlyList<ByteString> ApplicationMetadata => _arrowReaderImplementation.ApplicationMetadata;
-
-        public RecordBatch Current { get; private set; }
-
-        public async Task<bool> MoveNext(CancellationToken cancellationToken)
-        {
-            Current = await _arrowReaderImplementation.ReadNextRecordBatchAsync(cancellationToken);
-
-            return Current != null;
-        }
-
-        public IAsyncEnumerator<RecordBatch> GetAsyncEnumerator(CancellationToken cancellationToken = default)
-        {
-            return new AsyncEnumerator(this, cancellationToken);
-        }
-
-        public void Dispose()
-        {
-            _arrowReaderImplementation.Dispose();
-        }
-
-        private class AsyncEnumerator : IAsyncEnumerator<RecordBatch>
-        {
-            private readonly FlightRecordBatchStreamReader _flightRecordBatchStreamReader;
-            private readonly CancellationToken _cancellationToken;
-
-            internal AsyncEnumerator(FlightRecordBatchStreamReader flightRecordBatchStreamReader, CancellationToken cancellationToken)
-            {
-                _flightRecordBatchStreamReader = flightRecordBatchStreamReader;
-                _cancellationToken = cancellationToken;
-            }
-
-            public RecordBatch Current => _flightRecordBatchStreamReader.Current;
-
-            public async ValueTask<bool> MoveNextAsync()
-            {
-                return await _flightRecordBatchStreamReader.MoveNext(_cancellationToken);
-            }
-
-            public ValueTask DisposeAsync()
-            {
-                _flightRecordBatchStreamReader.Dispose();
-                return CompletedValueTask;
-            }
-        }
-    }
-}
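
Since the reader implemented both IAsyncStreamReader<RecordBatch> and IAsyncEnumerable<RecordBatch>, batches could be consumed with MoveNext()/Current or, more idiomatically, with await foreach; a minimal sketch:

    Schema schema = await reader.Schema;        // the schema precedes the first batch

    await foreach (RecordBatch batch in reader)
    {
        // Metadata attached to the batch just read, if any.
        IReadOnlyList<ByteString> metadata = reader.ApplicationMetadata;
        // ... consume the batch ...
    }
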
diff --git a/csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamWriter.cs b/csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamWriter.cs
deleted file mode 100644
index a72be5a..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightRecordBatchStreamWriter.cs
+++ /dev/null
@@ -1,77 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading.Tasks;
-using Apache.Arrow.Flight.Internal;
-using Apache.Arrow.Flight.Protocol;
-using Google.Protobuf;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight
-{
-    public abstract class FlightRecordBatchStreamWriter : IAsyncStreamWriter<RecordBatch>, IDisposable
-    {
-        private FlightDataStream _flightDataStream;
-        private readonly IAsyncStreamWriter<FlightData> _clientStreamWriter;
-        private readonly FlightDescriptor _flightDescriptor;
-
-        private bool _disposed;
-
-        private protected FlightRecordBatchStreamWriter(IAsyncStreamWriter<FlightData> clientStreamWriter, FlightDescriptor flightDescriptor)
-        {
-            _clientStreamWriter = clientStreamWriter;
-            _flightDescriptor = flightDescriptor;
-        }
-
-        private void SetupStream(Schema schema)
-        {
-            _flightDataStream = new FlightDataStream(_clientStreamWriter, _flightDescriptor, schema);
-        }
-
-        public WriteOptions WriteOptions { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
-
-        public Task WriteAsync(RecordBatch message)
-        {
-            return WriteAsync(message, default);
-        }
-
-        public Task WriteAsync(RecordBatch message, ByteString applicationMetadata)
-        {
-            if (_flightDataStream == null)
-            {
-                SetupStream(message.Schema);
-            }
-
-            return _flightDataStream.Write(message, applicationMetadata);
-        }
-
-        protected virtual void Dispose(bool disposing)
-        {
-            if (!_disposed)
-            {
-                _flightDataStream.Dispose();
-                _disposed = true;
-            }
-        }
-
-        public void Dispose()
-        {
-            Dispose(true);
-        }
-    }
-}
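
The writer set up the IPC stream lazily from the schema of the first batch written, so callers only wrote batches, optionally attaching per-batch application metadata; a sketch:

    // The schema message is sent automatically before the first batch.
    await writer.WriteAsync(firstBatch);

    // Later batches may carry application metadata.
    await writer.WriteAsync(nextBatch, ByteString.CopyFromUtf8("progress: 50%"));
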
diff --git a/csharp/src/Apache.Arrow.Flight/FlightResult.cs b/csharp/src/Apache.Arrow.Flight/FlightResult.cs
deleted file mode 100644
index 3ddadd4..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightResult.cs
+++ /dev/null
@@ -1,71 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightResult
-    {
-        private readonly Protocol.Result _result;
-
-        internal FlightResult(Protocol.Result result)
-        {
-            _result = result;
-        }
-
-        public FlightResult(ByteString body)
-        {
-            _result = new Protocol.Result()
-            {
-                Body = body
-            };
-        }
-
-        public FlightResult(string body)
-            : this(ByteString.CopyFromUtf8(body))
-        {
-        }
-
-        public FlightResult(byte[] body)
-            : this(ByteString.CopyFrom(body))
-        {
-        }
-
-        public ByteString Body => _result.Body;
-
-        internal Protocol.Result ToProtocol()
-        {
-            return _result;
-        }
-
-        public override bool Equals(object obj)
-        {
-            if(obj is FlightResult other)
-            {
-                return Equals(_result, other._result);
-            }
-            return false;
-        }
-
-        public override int GetHashCode()
-        {
-            return _result.GetHashCode();
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/FlightTicket.cs b/csharp/src/Apache.Arrow.Flight/FlightTicket.cs
deleted file mode 100644
index 7b3d6dd..0000000
--- a/csharp/src/Apache.Arrow.Flight/FlightTicket.cs
+++ /dev/null
@@ -1,70 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight
-{
-    public class FlightTicket
-    {
-        private readonly Protocol.Ticket _ticket;
-        internal FlightTicket(Protocol.Ticket ticket)
-        {
-            _ticket = ticket;
-        }
-
-        public FlightTicket(ByteString ticket)
-        {
-            _ticket = new Protocol.Ticket()
-            {
-                Ticket_ = ticket
-            };
-        }
-
-        public FlightTicket(string ticket)
-            : this(ByteString.CopyFromUtf8(ticket))
-        {
-        }
-
-        public FlightTicket(byte[] bytes)
-            : this(ByteString.CopyFrom(bytes))
-        {
-        }
-
-        public ByteString Ticket => _ticket.Ticket_;
-
-        internal Protocol.Ticket ToProtocol()
-        {
-            return _ticket;
-        }
-
-        public override bool Equals(object obj)
-        {
-            if(obj is FlightTicket other)
-            {
-                return Equals(_ticket, other._ticket);
-            }
-            return false;
-        }
-
-        public override int GetHashCode()
-        {
-            return _ticket.GetHashCode();
-        }
-    }
-}
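
Because Equals and GetHashCode delegate to the underlying protobuf message, tickets compared by value, which made them convenient dictionary keys on the server; a sketch:

    var store = new Dictionary<FlightTicket, RecordBatch>();
    store[new FlightTicket("table-1")] = batch;

    // An equal ticket value retrieves the same entry.
    bool found = store.TryGetValue(new FlightTicket("table-1"), out RecordBatch stored);
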
diff --git a/csharp/src/Apache.Arrow.Flight/Internal/FlightDataStream.cs b/csharp/src/Apache.Arrow.Flight/Internal/FlightDataStream.cs
deleted file mode 100644
index 8658845..0000000
--- a/csharp/src/Apache.Arrow.Flight/Internal/FlightDataStream.cs
+++ /dev/null
@@ -1,109 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Apache.Arrow.Flatbuf;
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Ipc;
-using FlatBuffers;
-using Google.Protobuf;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Internal
-{
-    /// <summary>
-    /// Handles writing record batches as flight data
-    /// </summary>
-    internal class FlightDataStream : ArrowStreamWriter
-    {
-        private readonly FlightDescriptor _flightDescriptor;
-        private readonly IAsyncStreamWriter<FlightData> _clientStreamWriter;
-        private Protocol.FlightData _currentFlightData;
-
-        public FlightDataStream(IAsyncStreamWriter<FlightData> clientStreamWriter, FlightDescriptor flightDescriptor, Schema schema)
-            : base(new MemoryStream(), schema)
-        {
-            _clientStreamWriter = clientStreamWriter;
-            _flightDescriptor = flightDescriptor;
-        }
-
-        private async Task SendSchema()
-        {
-            _currentFlightData = new Protocol.FlightData();
-
-            if(_flightDescriptor != null)
-            {
-                _currentFlightData.FlightDescriptor = _flightDescriptor.ToProtocol();
-            }
-
-            var offset = SerializeSchema(Schema);
-            CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
-            await WriteMessageAsync(MessageHeader.Schema, offset, 0, cancellationTokenSource.Token).ConfigureAwait(false);
-            await _clientStreamWriter.WriteAsync(_currentFlightData).ConfigureAwait(false);
-            HasWrittenSchema = true;
-        }
-
-        private void ResetStream()
-        {
-            this.BaseStream.Position = 0;
-            this.BaseStream.SetLength(0);
-        }
-
-        public async Task Write(RecordBatch recordBatch, ByteString applicationMetadata)
-        {
-            if (!HasWrittenSchema)
-            {
-                await SendSchema().ConfigureAwait(false);
-            }
-            ResetStream();
-
-            _currentFlightData = new Protocol.FlightData();
-
-            if(applicationMetadata != null)
-            {
-                _currentFlightData.AppMetadata = applicationMetadata;
-            }
-
-            await WriteRecordBatchInternalAsync(recordBatch).ConfigureAwait(false);
-
-            //Reset stream position
-            this.BaseStream.Position = 0;
-            var bodyData = await ByteString.FromStreamAsync(this.BaseStream).ConfigureAwait(false);
-
-            _currentFlightData.DataBody = bodyData;
-            await _clientStreamWriter.WriteAsync(_currentFlightData).ConfigureAwait(false);
-        }
-
-        private protected override ValueTask<long> WriteMessageAsync<T>(MessageHeader headerType, Offset<T> headerOffset, int bodyLength, CancellationToken cancellationToken)
-        {
-            Offset<Flatbuf.Message> messageOffset = Flatbuf.Message.CreateMessage(
-                Builder, CurrentMetadataVersion, headerType, headerOffset.Value,
-                bodyLength);
-
-            Builder.Finish(messageOffset.Value);
-
-            ReadOnlyMemory<byte> messageData = Builder.DataBuffer.ToReadOnlyMemory(Builder.DataBuffer.Position, Builder.Offset);
-
-            _currentFlightData.DataHeader = ByteString.CopyFrom(messageData.Span);
-
-            return new ValueTask<long>(0);
-        }
-    }
-}
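
FlightDataStream mapped each Arrow IPC message onto a single gRPC FlightData message, putting the flatbuffer header in DataHeader and the buffers in DataBody. Illustratively, a stream carrying one schema and two batches produced this sequence of wire messages:

    // FlightData { FlightDescriptor, DataHeader = <Schema message> }
    // FlightData { DataHeader = <RecordBatch message>, DataBody = <buffers>, AppMetadata? }
    // FlightData { DataHeader = <RecordBatch message>, DataBody = <buffers>, AppMetadata? }
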
diff --git a/csharp/src/Apache.Arrow.Flight/Internal/FlightMessageSerializer.cs b/csharp/src/Apache.Arrow.Flight/Internal/FlightMessageSerializer.cs
deleted file mode 100644
index 0ac2d19..0000000
--- a/csharp/src/Apache.Arrow.Flight/Internal/FlightMessageSerializer.cs
+++ /dev/null
@@ -1,57 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Buffers.Binary;
-using System.Collections.Generic;
-using System.IO;
-using System.Text;
-using Apache.Arrow.Ipc;
-using FlatBuffers;
-
-namespace Apache.Arrow.Flight
-{
-    internal static class FlightMessageSerializer
-    {
-        public static Schema DecodeSchema(ReadOnlyMemory<byte> buffer)
-        {
-            int bufferPosition = 0;
-            int schemaMessageLength = BinaryPrimitives.ReadInt32LittleEndian(buffer.Span.Slice(bufferPosition));
-            bufferPosition += sizeof(int);
-
-            if (schemaMessageLength == MessageSerializer.IpcContinuationToken)
-            {
-                // ARROW-6313, if the first 4 bytes are continuation message, read the next 4 for the length
-                if (buffer.Length <= bufferPosition + sizeof(int))
-                {
-                    throw new InvalidDataException("Corrupted IPC message. Received a continuation token at the end of the message.");
-                }
-
-                schemaMessageLength = BinaryPrimitives.ReadInt32LittleEndian(buffer.Span.Slice(bufferPosition));
-                bufferPosition += sizeof(int);
-            }
-
-            ByteBuffer schemaBuffer = ArrowReaderImplementation.CreateByteBuffer(buffer.Slice(bufferPosition));
-            var schema = MessageSerializer.GetSchema(ArrowReaderImplementation.ReadMessage<Flatbuf.Schema>(schemaBuffer));
-            return schema;
-        }
-
-        internal static Schema DecodeSchema(ByteBuffer schemaBuffer)
-        {
-            var schema = MessageSerializer.GetSchema(ArrowReaderImplementation.ReadMessage<Flatbuf.Schema>(schemaBuffer));
-            return schema;
-        }
-    }
-}
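
DecodeSchema accepted both the legacy IPC framing (a plain 4-byte little-endian length prefix) and the ARROW-6313 framing, where a 0xFFFFFFFF continuation token precedes the length:

    // Legacy:       [int32 length][flatbuffer Schema message ...]
    // Continuation: [0xFFFFFFFF][int32 length][flatbuffer Schema message ...]
    // Either layout decodes to the same Schema, e.g. from a received Protocol.FlightInfo:
    Schema schema = FlightMessageSerializer.DecodeSchema(flightInfo.Schema.Memory);
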
diff --git a/csharp/src/Apache.Arrow.Flight/Internal/RecordBatcReaderImplementation.cs b/csharp/src/Apache.Arrow.Flight/Internal/RecordBatcReaderImplementation.cs
deleted file mode 100644
index 10d4d73..0000000
--- a/csharp/src/Apache.Arrow.Flight/Internal/RecordBatcReaderImplementation.cs
+++ /dev/null
@@ -1,131 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Apache.Arrow.Flatbuf;
-using Apache.Arrow.Ipc;
-using Google.Protobuf;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Internal
-{
-    internal class RecordBatcReaderImplementation : ArrowReaderImplementation
-    {
-        private readonly IAsyncStreamReader<Protocol.FlightData> _flightDataStream;
-        private FlightDescriptor _flightDescriptor;
-        private readonly List<ByteString> _applicationMetadatas;
-
-        public RecordBatcReaderImplementation(IAsyncStreamReader<Protocol.FlightData> streamReader)
-        {
-            _flightDataStream = streamReader;
-            _applicationMetadatas = new List<ByteString>();
-        }
-
-        public override RecordBatch ReadNextRecordBatch()
-        {
-            throw new NotImplementedException();
-        }
-
-        public IReadOnlyList<ByteString> ApplicationMetadata => _applicationMetadatas;
-
-        public async ValueTask<FlightDescriptor> ReadFlightDescriptor()
-        {
-            if (!HasReadSchema)
-            {
-                await ReadSchema().ConfigureAwait(false);
-            }
-            return _flightDescriptor;
-        }
-
-        public async ValueTask<Schema> ReadSchema()
-        {
-            if (HasReadSchema)
-            {
-                return Schema;
-            }
-
-            var moveNextResult = await _flightDataStream.MoveNext().ConfigureAwait(false);
-
-            if (!moveNextResult)
-            {
-                throw new Exception("No records or schema in this flight");
-            }
-
-            //AppMetadata will never be null, but has length 0 when empty;
-            //empty metadata entries are skipped
-            if(_flightDataStream.Current.AppMetadata.Length > 0)
-            {
-                _applicationMetadatas.Add(_flightDataStream.Current.AppMetadata);
-            }
-
-            var header = _flightDataStream.Current.DataHeader.Memory;
-            Message message = Message.GetRootAsMessage(
-                ArrowReaderImplementation.CreateByteBuffer(header));
-
-            if(_flightDataStream.Current.FlightDescriptor != null)
-            {
-                _flightDescriptor = new FlightDescriptor(_flightDataStream.Current.FlightDescriptor);
-            }
-
-            switch (message.HeaderType)
-            {
-                case MessageHeader.Schema:
-                    Schema = FlightMessageSerializer.DecodeSchema(message.ByteBuffer);
-                    break;
-                default:
-                    throw new Exception($"Expected schema as the first message, but got: {message.HeaderType.ToString()}");
-            }
-            return Schema;
-        }
-
-        public override async ValueTask<RecordBatch> ReadNextRecordBatchAsync(CancellationToken cancellationToken)
-        {
-            _applicationMetadatas.Clear(); //Clear any metadata from previous calls
-
-            if (!HasReadSchema)
-            {
-                await ReadSchema().ConfigureAwait(false);
-            }
-            var moveNextResult = await _flightDataStream.MoveNext().ConfigureAwait(false);
-            if (moveNextResult)
-            {
-                //AppMetadata will never be null, but has length 0 when empty;
-                //empty metadata entries are skipped
-                if (_flightDataStream.Current.AppMetadata.Length > 0)
-                {
-                    _applicationMetadatas.Add(_flightDataStream.Current.AppMetadata);
-                }
-
-                var header = _flightDataStream.Current.DataHeader.Memory;
-                Message message = Message.GetRootAsMessage(CreateByteBuffer(header));
-
-                switch (message.HeaderType)
-                {
-                    case MessageHeader.RecordBatch:
-                        var body = _flightDataStream.Current.DataBody.Memory;
-                        return CreateArrowObjectFromMessage(message, CreateByteBuffer(body.Slice(0, (int)message.BodyLength)), null);
-                    default:
-                        throw new NotImplementedException();
-                }
-            }
-            return null;
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/Internal/SchemaWriter.cs b/csharp/src/Apache.Arrow.Flight/Internal/SchemaWriter.cs
deleted file mode 100644
index c7e7d81..0000000
--- a/csharp/src/Apache.Arrow.Flight/Internal/SchemaWriter.cs
+++ /dev/null
@@ -1,55 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Apache.Arrow.Flatbuf;
-using Apache.Arrow.Ipc;
-using Google.Protobuf;
-
-namespace Apache.Arrow.Flight.Internal
-{
-    /// <summary>
-    /// This class handles writing schemas
-    /// </summary>
-    internal class SchemaWriter : ArrowStreamWriter
-    {
-        private SchemaWriter(Stream baseStream, Schema schema) : base(baseStream, schema)
-        {
-        }
-
-        public void WriteSchema(Schema schema, CancellationToken cancellationToken)
-        {
-            var offset = base.SerializeSchema(schema);
-            WriteMessage(MessageHeader.Schema, offset, 0);
-        }
-
-        public static ByteString SerializeSchema(Schema schema, CancellationToken cancellationToken = default(CancellationToken))
-        {
-            using(var memoryStream = new MemoryStream())
-            {
-                var writer = new SchemaWriter(memoryStream, schema);
-                writer.WriteSchema(schema, cancellationToken);
-
-                memoryStream.Position = 0;
-                return ByteString.FromStream(memoryStream);
-            }
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/Internal/StreamReader.cs b/csharp/src/Apache.Arrow.Flight/Internal/StreamReader.cs
deleted file mode 100644
index a2c3db3..0000000
--- a/csharp/src/Apache.Arrow.Flight/Internal/StreamReader.cs
+++ /dev/null
@@ -1,54 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Internal
-{
-
-    /// <summary>
-    /// This is a helper class that converts gRPC types to Arrow types.
-    /// It wraps the underlying stream so that data can be read and converted as soon as it arrives.
-    /// </summary>
-    /// <typeparam name="TIn">Input type received from gRPC</typeparam>
-    /// <typeparam name="TOut">The Arrow type returned</typeparam>
-    internal class StreamReader<TIn, TOut> : IAsyncStreamReader<TOut>
-    {
-        private readonly IAsyncStreamReader<TIn> _inputStream;
-        private readonly Func<TIn, TOut> _convertFunction;
-        internal StreamReader(IAsyncStreamReader<TIn> inputStream, Func<TIn, TOut> convertFunction)
-        {
-            _inputStream = inputStream;
-            _convertFunction = convertFunction;
-        }
-
-        public TOut Current { get; private set; }
-
-        public async Task<bool> MoveNext(CancellationToken cancellationToken)
-        {
-            var moveNextResult = await _inputStream.MoveNext(cancellationToken).ConfigureAwait(false);
-            if (moveNextResult)
-            {
-                Current = _convertFunction(_inputStream.Current);
-            }
-            return moveNextResult;
-        }
-    }
-}
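
The adapter let the public API expose Arrow wrapper types while gRPC produced raw protocol messages, converting lazily on each MoveNext; a minimal sketch (grpcResponseStream is an assumed IAsyncStreamReader<Protocol.PutResult>):

    IAsyncStreamReader<FlightPutResult> results =
        new StreamReader<Protocol.PutResult, FlightPutResult>(
            grpcResponseStream,
            putResult => new FlightPutResult(putResult));
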
diff --git a/csharp/src/Apache.Arrow.Flight/Internal/StreamWriter.cs b/csharp/src/Apache.Arrow.Flight/Internal/StreamWriter.cs
deleted file mode 100644
index c50b41e..0000000
--- a/csharp/src/Apache.Arrow.Flight/Internal/StreamWriter.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading.Tasks;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Internal
-{
-    internal class StreamWriter<TIn, TOut> : IAsyncStreamWriter<TIn>
-    {
-        private readonly IAsyncStreamWriter<TOut> _inputStream;
-        private readonly Func<TIn, TOut> _convertFunction;
-        internal StreamWriter(IAsyncStreamWriter<TOut> inputStream, Func<TIn, TOut> convertFunction)
-        {
-            _inputStream = inputStream;
-            _convertFunction = convertFunction;
-        }
-
-        public WriteOptions WriteOptions
-        {
-            get
-            {
-                return _inputStream.WriteOptions;
-            }
-            set
-            {
-                _inputStream.WriteOptions = value;
-            }
-        }
-
-        public Task WriteAsync(TIn message)
-        {
-            return _inputStream.WriteAsync(_convertFunction(message));
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/Properties/AssemblyInfo.cs b/csharp/src/Apache.Arrow.Flight/Properties/AssemblyInfo.cs
deleted file mode 100644
index 07934ad..0000000
--- a/csharp/src/Apache.Arrow.Flight/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System.Runtime.CompilerServices;
-
-[assembly: InternalsVisibleTo("Apache.Arrow.Flight.AspNetCore, PublicKey=0024000004800000940000000602000000240000525341310004000001000100e504183f6d470d6b67b6d19212be3e1f598f70c246a120194bc38130101d0c1853e4a0f2232cb12e37a7a90e707aabd38511dac4f25fcb0d691b2aa265900bf42de7f70468fc997551a40e1e0679b605aa2088a4a69e07c117e988f5b1738c570ee66997fba02485e7856a49eca5fd0706d09899b8312577cbb9034599fc92d4")]
diff --git a/csharp/src/Apache.Arrow.Flight/Server/FlightServer.cs b/csharp/src/Apache.Arrow.Flight/Server/FlightServer.cs
deleted file mode 100644
index 30b0409..0000000
--- a/csharp/src/Apache.Arrow.Flight/Server/FlightServer.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading.Tasks;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Server
-{
-    public abstract class FlightServer
-    {
-        public virtual Task DoPut(FlightServerRecordBatchStreamReader requestStream, IAsyncStreamWriter<FlightPutResult> responseStream, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-
-        public virtual Task DoGet(FlightTicket ticket, FlightServerRecordBatchStreamWriter responseStream, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-
-        public virtual Task ListFlights(FlightCriteria request, IAsyncStreamWriter<FlightInfo> responseStream, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-
-        public virtual Task ListActions(IAsyncStreamWriter<FlightActionType> responseStream, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-
-        public virtual Task DoAction(FlightAction request, IAsyncStreamWriter<FlightResult> responseStream, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-
-        public virtual Task<Schema> GetSchema(FlightDescriptor request, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-
-        public virtual Task<FlightInfo> GetFlightInfo(FlightDescriptor request, ServerCallContext context)
-        {
-            throw new NotImplementedException();
-        }
-    }
-}
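
Every RPC on the abstract class above defaults to NotImplementedException, so a concrete server overrides only the calls it supports. Below is a minimal sketch of a DoGet override; InMemoryFlightServer is a hypothetical name, and the schema/batch construction assumes the builder APIs defined in the Apache.Arrow sources elsewhere in this diff.

    using System.Threading.Tasks;
    using Apache.Arrow;
    using Apache.Arrow.Flight;
    using Apache.Arrow.Flight.Server;
    using Apache.Arrow.Types;
    using Grpc.Core;

    public class InMemoryFlightServer : FlightServer
    {
        public override async Task DoGet(
            FlightTicket ticket,
            FlightServerRecordBatchStreamWriter responseStream,
            ServerCallContext context)
        {
            // Build a single one-column Int32 batch and stream it back;
            // the writer serializes the schema followed by the batch.
            Int32Array values = new Int32Array.Builder().AppendRange(new[] { 1, 2, 3 }).Build();
            Schema schema = new Schema.Builder()
                .Field(f => f.Name("value").DataType(Int32Type.Default))
                .Build();
            var batch = new RecordBatch(schema, new IArrowArray[] { values }, values.Length);

            await responseStream.WriteAsync(batch);
        }
    }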
diff --git a/csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamReader.cs b/csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamReader.cs
deleted file mode 100644
index 5476d3d..0000000
--- a/csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamReader.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System.Threading.Tasks;
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Flight.Internal;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Server
-{
-    public class FlightServerRecordBatchStreamReader : FlightRecordBatchStreamReader
-    {
-        internal FlightServerRecordBatchStreamReader(IAsyncStreamReader<FlightData> flightDataStream) : base(flightDataStream)
-        {
-        }
-
-        public ValueTask<FlightDescriptor> FlightDescriptor => GetFlightDescriptor();
-    }
-}
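
The reader above defers the descriptor until the first message arrives, which is why FlightDescriptor is a ValueTask. A sketch of a DoPut override consuming it follows; it assumes the base reader exposes Grpc.Core's standard IAsyncStreamReader<RecordBatch> surface, and FlightPutResult.Empty as an acknowledgement is also an assumption.

    using System.Threading.Tasks;
    using Apache.Arrow;
    using Apache.Arrow.Flight;
    using Apache.Arrow.Flight.Server;
    using Grpc.Core;

    public class PutOnlyFlightServer : FlightServer
    {
        public override async Task DoPut(
            FlightServerRecordBatchStreamReader requestStream,
            IAsyncStreamWriter<FlightPutResult> responseStream,
            ServerCallContext context)
        {
            // The descriptor rides along with the first FlightData message,
            // hence the awaitable property above.
            FlightDescriptor descriptor = await requestStream.FlightDescriptor;

            // Assumed: the base reader exposes MoveNext/Current.
            while (await requestStream.MoveNext(context.CancellationToken))
            {
                RecordBatch batch = requestStream.Current;
                // ... persist `batch` under `descriptor` ...
            }

            await responseStream.WriteAsync(FlightPutResult.Empty);   // assumed ack
        }
    }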
diff --git a/csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamWriter.cs b/csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamWriter.cs
deleted file mode 100644
index 6c19873..0000000
--- a/csharp/src/Apache.Arrow.Flight/Server/FlightServerRecordBatchStreamWriter.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Flight.Internal;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Server
-{
-    public class FlightServerRecordBatchStreamWriter : FlightRecordBatchStreamWriter, IServerStreamWriter<RecordBatch>
-    {
-        internal FlightServerRecordBatchStreamWriter(IServerStreamWriter<FlightData> serverStreamWriter) : base(serverStreamWriter, null)
-        {
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow.Flight/Server/Internal/FlightServerImplementation.cs b/csharp/src/Apache.Arrow.Flight/Server/Internal/FlightServerImplementation.cs
deleted file mode 100644
index dcf6e57..0000000
--- a/csharp/src/Apache.Arrow.Flight/Server/Internal/FlightServerImplementation.cs
+++ /dev/null
@@ -1,100 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Text;
-using System.Threading.Tasks;
-using Apache.Arrow.Flight.Internal;
-using Apache.Arrow.Flight.Protocol;
-using Apache.Arrow.Flight.Server;
-using Grpc.Core;
-
-namespace Apache.Arrow.Flight.Server.Internal
-{
-    /// <summary>
-    /// This class must be internal because the gRPC service code generated from the proto files is internal.
-    /// </summary>
-    internal class FlightServerImplementation : FlightService.FlightServiceBase
-    {
-        private readonly FlightServer _flightServer;
-        public FlightServerImplementation(FlightServer flightServer)
-        {
-            _flightServer = flightServer;
-        }
-
-        public override async Task DoPut(IAsyncStreamReader<FlightData> requestStream, IServerStreamWriter<Protocol.PutResult> responseStream, ServerCallContext context)
-        {
-            var readStream = new FlightServerRecordBatchStreamReader(requestStream);
-            var writeStream = new StreamWriter<FlightPutResult, Protocol.PutResult>(responseStream, putResult => putResult.ToProtocol());
-            await _flightServer.DoPut(readStream, writeStream, context).ConfigureAwait(false);
-        }
-
-        public override Task DoGet(Protocol.Ticket request, IServerStreamWriter<FlightData> responseStream, ServerCallContext context)
-        {
-            return _flightServer.DoGet(new FlightTicket(request.Ticket_), new FlightServerRecordBatchStreamWriter(responseStream), context);
-        }
-
-        public override Task ListFlights(Protocol.Criteria request, IServerStreamWriter<Protocol.FlightInfo> responseStream, ServerCallContext context)
-        {
-            var writeStream = new StreamWriter<FlightInfo, Protocol.FlightInfo>(responseStream, flightInfo => flightInfo.ToProtocol());
-            return _flightServer.ListFlights(new FlightCriteria(request), writeStream, context);
-        }
-
-        public override Task DoAction(Protocol.Action request, IServerStreamWriter<Protocol.Result> responseStream, ServerCallContext context)
-        {
-            var action = new FlightAction(request);
-            var writeStream = new StreamWriter<FlightResult, Protocol.Result>(responseStream, result => result.ToProtocol());
-            return _flightServer.DoAction(action, writeStream, context);
-        }
-
-        public override async Task<SchemaResult> GetSchema(Protocol.FlightDescriptor request, ServerCallContext context)
-        {
-            var flightDescriptor = new FlightDescriptor(request);
-            var schema = await _flightServer.GetSchema(flightDescriptor, context).ConfigureAwait(false);
-
-            return new SchemaResult()
-            {
-                Schema = SchemaWriter.SerializeSchema(schema)
-            };
-        }
-
-        public override async Task<Protocol.FlightInfo> GetFlightInfo(Protocol.FlightDescriptor request, ServerCallContext context)
-        {
-            var flightDescriptor = new FlightDescriptor(request);
-            var flightInfo = await _flightServer.GetFlightInfo(flightDescriptor, context).ConfigureAwait(false);
-
-            return flightInfo.ToProtocol();
-        }
-
-        public override Task DoExchange(IAsyncStreamReader<FlightData> requestStream, IServerStreamWriter<FlightData> responseStream, ServerCallContext context)
-        {
-            // Exchange is not yet implemented.
-            throw new NotImplementedException();
-        }
-
-        public override Task Handshake(IAsyncStreamReader<HandshakeRequest> requestStream, IServerStreamWriter<HandshakeResponse> responseStream, ServerCallContext context)
-        {
-            // Handshake is not yet implemented.
-            throw new NotImplementedException();
-        }
-
-        public override Task ListActions(Empty request, IServerStreamWriter<Protocol.ActionType> responseStream, ServerCallContext context)
-        {
-            var writeStream = new StreamWriter<FlightActionType, Protocol.ActionType>(responseStream, (actionType) => actionType.ToProtocol());
-            return _flightServer.ListActions(writeStream, context);
-        }
-    }
-}
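
Each override above adapts the protocol-level response stream with the internal StreamWriter<TIn, TOut> (whose tail opens this excerpt): callers write the public type, and a conversion function produces the protobuf type on the wire. A rough reconstruction of that adapter, under the assumption that it implements Grpc.Core's IAsyncStreamWriter<TIn>; ConvertingStreamWriter is an illustrative name.

    using System;
    using System.Threading.Tasks;
    using Grpc.Core;

    internal class ConvertingStreamWriter<TIn, TOut> : IAsyncStreamWriter<TIn>
    {
        private readonly IAsyncStreamWriter<TOut> _inner;
        private readonly Func<TIn, TOut> _convert;

        public ConvertingStreamWriter(IAsyncStreamWriter<TOut> inner, Func<TIn, TOut> convert)
        {
            _inner = inner;
            _convert = convert;
        }

        // IAsyncStreamWriter<T> also carries per-call write options;
        // delegate them to the wrapped writer.
        public WriteOptions WriteOptions
        {
            get => _inner.WriteOptions;
            set => _inner.WriteOptions = value;
        }

        // Convert on the way out, as the WriteAsync at the top of this
        // excerpt does.
        public Task WriteAsync(TIn message) => _inner.WriteAsync(_convert(message));
    }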
diff --git a/csharp/src/Apache.Arrow/Apache.Arrow.csproj b/csharp/src/Apache.Arrow/Apache.Arrow.csproj
deleted file mode 100644
index 6257402..0000000
--- a/csharp/src/Apache.Arrow/Apache.Arrow.csproj
+++ /dev/null
@@ -1,42 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-
-  <PropertyGroup>
-    <TargetFrameworks>netstandard1.3;netcoreapp2.1</TargetFrameworks>
-    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
-    <DefineConstants>$(DefineConstants);UNSAFE_BYTEBUFFER;BYTEBUFFER_NO_BOUNDS_CHECK;ENABLE_SPAN_T</DefineConstants>
-    
-    <Description>Apache Arrow is a cross-language development platform for in-memory data. It specifies a standardized language-independent columnar memory format for flat and hierarchical data, organized for efficient analytic operations on modern hardware.</Description>
-  </PropertyGroup>
-
-  <ItemGroup>
-    <PackageReference Include="System.Buffers" Version="4.5.0" />
-    <PackageReference Include="System.Memory" Version="4.5.2" />
-    <PackageReference Include="System.Runtime.CompilerServices.Unsafe" Version="4.5.2" />
-    <PackageReference Include="System.Threading.Tasks.Extensions" Version="4.5.2" />
-
-    <PackageReference Include="Microsoft.SourceLink.GitHub" Version="1.0.0" PrivateAssets="All" />
-  </ItemGroup>
-
-  <ItemGroup>
-    <Compile Update="Properties\Resources.Designer.cs">
-      <DesignTime>True</DesignTime>
-      <AutoGen>True</AutoGen>
-      <DependentUpon>Resources.resx</DependentUpon>
-    </Compile>
-  </ItemGroup>
-
-  <ItemGroup>
-    <EmbeddedResource Update="Properties\Resources.resx">
-      <Generator>ResXFileCodeGenerator</Generator>
-      <LastGenOutput>Resources.Designer.cs</LastGenOutput>
-    </EmbeddedResource>
-  </ItemGroup>
-
-  <ItemGroup Condition="'$(TargetFramework)' == 'netstandard1.3'">
-    <Compile Remove="Extensions\StreamExtensions.netcoreapp2.1.cs" />
-  </ItemGroup>
-  <ItemGroup Condition="'$(TargetFramework)' == 'netcoreapp2.1'">
-    <Compile Remove="Extensions\StreamExtensions.netstandard.cs" />
-    <Compile Remove="Extensions\TupleExtensions.netstandard.cs" />
-  </ItemGroup>
-</Project>
diff --git a/csharp/src/Apache.Arrow/Arrays/Array.cs b/csharp/src/Apache.Arrow/Arrays/Array.cs
deleted file mode 100644
index a453b08..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Array.cs
+++ /dev/null
@@ -1,91 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Runtime.CompilerServices;
-
-namespace Apache.Arrow
-{
-    public abstract class Array : IArrowArray
-    {
-        public ArrayData Data { get; }
-
-        protected Array(ArrayData data)
-        {
-            Data = data ?? throw new ArgumentNullException(nameof(data));
-        }
-
-        public int Length => Data.Length;
-
-        public int Offset => Data.Offset;
-
-        public int NullCount => Data.NullCount;
-
-        public ArrowBuffer NullBitmapBuffer => Data.Buffers[0];
-
-        public virtual void Accept(IArrowArrayVisitor visitor)
-        {
-            Accept(this, visitor);
-        }
-
-        public bool IsValid(int index) =>
-            NullCount == 0 || NullBitmapBuffer.IsEmpty || BitUtility.GetBit(NullBitmapBuffer.Span, index + Offset);
-
-        public bool IsNull(int index) => !IsValid(index);
-
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        internal static void Accept<T>(T array, IArrowArrayVisitor visitor)
-            where T : class, IArrowArray
-        {
-            switch (visitor)
-            {
-                case IArrowArrayVisitor<T> typedVisitor:
-                    typedVisitor.Visit(array);
-                    break;
-                default:
-                    visitor.Visit(array);
-                    break;
-            }
-        }
-
-        public Array Slice(int offset, int length)
-        {
-            if (offset > Length)
-            {
-                throw new ArgumentException($"Offset {offset} cannot be greater than Length {Length} for Array.Slice");
-            }
-
-            length = Math.Min(Data.Length - offset, length);
-            offset += Data.Offset;
-
-            ArrayData newData = Data.Slice(offset, length);
-            return ArrowArrayFactory.BuildArray(newData) as Array;
-        }
-
-        public void Dispose()
-        {
-            Dispose(true);
-            GC.SuppressFinalize(this);
-        }
-
-        protected virtual void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                Data.Dispose();
-            }
-        }
-    }
-}
\ No newline at end of file
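
Accept<T> above prefers the typed overload when the visitor implements IArrowArrayVisitor<T>, falling back to the untyped Visit(IArrowArray) otherwise. A sketch of a visitor exercising that double dispatch; SumVisitor is an illustrative name.

    using Apache.Arrow;

    class SumVisitor : IArrowArrayVisitor<Int32Array>
    {
        public long Sum { get; private set; }

        // Chosen by Accept<T> when the array is an Int32Array.
        public void Visit(Int32Array array)
        {
            for (int i = 0; i < array.Length; i++)
            {
                Sum += array.GetValue(i) ?? 0;   // treat nulls as zero
            }
        }

        // Fallback for every other array type.
        public void Visit(IArrowArray array) { }
    }

    // Usage: someInt32Array.Accept(new SumVisitor());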
diff --git a/csharp/src/Apache.Arrow/Arrays/ArrayData.cs b/csharp/src/Apache.Arrow/Arrays/ArrayData.cs
deleted file mode 100644
index 93bb5cc..0000000
--- a/csharp/src/Apache.Arrow/Arrays/ArrayData.cs
+++ /dev/null
@@ -1,92 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Apache.Arrow
-{
-    public sealed class ArrayData : IDisposable
-    {
-        private const int RecalculateNullCount = -1;
-
-        public readonly IArrowType DataType;
-        public readonly int Length;
-        public readonly int NullCount;
-        public readonly int Offset;
-        public readonly ArrowBuffer[] Buffers;
-        public readonly ArrayData[] Children;
-
-        public ArrayData(
-            IArrowType dataType,
-            int length, int nullCount = 0, int offset = 0,
-            IEnumerable<ArrowBuffer> buffers = null, IEnumerable<ArrayData> children = null)
-        {
-            DataType = dataType ?? NullType.Default;
-            Length = length;
-            NullCount = nullCount;
-            Offset = offset;
-            Buffers = buffers?.ToArray();
-            Children = children?.ToArray();
-        }
-
-        public ArrayData(
-            IArrowType dataType,
-            int length, int nullCount = 0, int offset = 0,
-            ArrowBuffer[] buffers = null, ArrayData[] children = null)
-        {
-            DataType = dataType ?? NullType.Default;
-            Length = length;
-            NullCount = nullCount;
-            Offset = offset;
-            Buffers = buffers;
-            Children = children;
-        }
-
-        public void Dispose()
-        {
-            if (Buffers != null)
-            {
-                foreach (ArrowBuffer buffer in Buffers)
-                {
-                    buffer.Dispose();
-                }
-            }
-
-            if (Children != null)
-            {
-                foreach (ArrayData child in Children)
-                {
-                    child?.Dispose();
-                }
-            }
-        }
-
-        public ArrayData Slice(int offset, int length)
-        {
-            if (offset > Length)
-            {
-                throw new ArgumentException($"Offset {offset} cannot be greater than Length {Length} for Array.Slice");
-            }
-
-            length = Math.Min(Length - offset, length);
-            offset += Offset;
-
-            return new ArrayData(DataType, length, RecalculateNullCount, offset, Buffers, Children);
-        }
-    }
-}
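
Slice above copies no buffers: it clamps the requested window and accumulates offsets, deferring the null count. A worked example, assuming an ArrayData built with Length = 10 and Offset = 2:

    // data: Length = 10, Offset = 2
    ArrayData sliced = data.Slice(6, 100);
    // sliced.Length = Math.Min(10 - 6, 100) = 4   (clamped to what remains)
    // sliced.Offset = 6 + 2 = 8                   (offsets accumulate; buffers are shared)
    // sliced.NullCount = RecalculateNullCount (-1), i.e. unknown until recomputed.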
diff --git a/csharp/src/Apache.Arrow/Arrays/ArrowArrayBuilderFactory.cs b/csharp/src/Apache.Arrow/Arrays/ArrowArrayBuilderFactory.cs
deleted file mode 100644
index e736094..0000000
--- a/csharp/src/Apache.Arrow/Arrays/ArrowArrayBuilderFactory.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-
-namespace Apache.Arrow
-{
-    static class ArrowArrayBuilderFactory
-    {
-        internal static IArrowArrayBuilder<IArrowArray, IArrowArrayBuilder<IArrowArray>> Build(IArrowType dataType)
-        {
-            switch (dataType.TypeId)
-            {
-                case ArrowTypeId.Boolean:
-                    return new BooleanArray.Builder();
-                case ArrowTypeId.UInt8:
-                    return new UInt8Array.Builder();
-                case ArrowTypeId.Int8:
-                    return new Int8Array.Builder();
-                case ArrowTypeId.UInt16:
-                    return new UInt16Array.Builder();
-                case ArrowTypeId.Int16:
-                    return new Int16Array.Builder();
-                case ArrowTypeId.UInt32:
-                    return new UInt32Array.Builder();
-                case ArrowTypeId.Int32:
-                    return new Int32Array.Builder();
-                case ArrowTypeId.UInt64:
-                    return new UInt64Array.Builder();
-                case ArrowTypeId.Int64:
-                    return new Int64Array.Builder();
-                case ArrowTypeId.Float:
-                    return new FloatArray.Builder();
-                case ArrowTypeId.Double:
-                    return new DoubleArray.Builder();
-                case ArrowTypeId.String:
-                    return new StringArray.Builder();
-                case ArrowTypeId.Binary:
-                    return new BinaryArray.Builder();
-                case ArrowTypeId.Timestamp:
-                    return new TimestampArray.Builder();
-                case ArrowTypeId.Date64:
-                    return new Date64Array.Builder();
-                case ArrowTypeId.Date32:
-                    return new Date32Array.Builder();
-                case ArrowTypeId.List:
-                    return new ListArray.Builder(dataType as ListType);
-                case ArrowTypeId.Decimal128:
-                    return new Decimal128Array.Builder(dataType as Decimal128Type);
-                case ArrowTypeId.Decimal256:
-                    return new Decimal256Array.Builder(dataType as Decimal256Type);
-                case ArrowTypeId.Struct:
-                case ArrowTypeId.Union:
-                case ArrowTypeId.Dictionary:
-                case ArrowTypeId.FixedSizedBinary:
-                case ArrowTypeId.HalfFloat:
-                case ArrowTypeId.Interval:
-                case ArrowTypeId.Map:
-                case ArrowTypeId.Time32:
-                case ArrowTypeId.Time64:
-                default:
-                    throw new NotSupportedException($"An ArrowArrayBuilder cannot be built for type {dataType.TypeId}.");
-            }
-        }
-    }
-}
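
This factory is internal; public callers construct builders directly. Within the assembly it gives type-id-driven builder creation, roughly:

    using Apache.Arrow;
    using Apache.Arrow.Types;

    // Only compiles inside the Apache.Arrow assembly, since Build is internal.
    var builder = ArrowArrayBuilderFactory.Build(Int32Type.Default);
    // builder is an Int32Array.Builder behind the generic builder interface.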
diff --git a/csharp/src/Apache.Arrow/Arrays/ArrowArrayFactory.cs b/csharp/src/Apache.Arrow/Arrays/ArrowArrayFactory.cs
deleted file mode 100644
index c342923..0000000
--- a/csharp/src/Apache.Arrow/Arrays/ArrowArrayFactory.cs
+++ /dev/null
@@ -1,81 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-
-namespace Apache.Arrow
-{
-    public static class ArrowArrayFactory
-    {
-        public static IArrowArray BuildArray(ArrayData data)
-        {
-            switch (data.DataType.TypeId)
-            {
-                case ArrowTypeId.Boolean:
-                    return new BooleanArray(data);
-                case ArrowTypeId.UInt8:
-                    return new UInt8Array(data);
-                case ArrowTypeId.Int8:
-                    return new Int8Array(data);
-                case ArrowTypeId.UInt16:
-                    return new UInt16Array(data);
-                case ArrowTypeId.Int16:
-                    return new Int16Array(data);
-                case ArrowTypeId.UInt32:
-                    return new UInt32Array(data);
-                case ArrowTypeId.Int32:
-                    return new Int32Array(data);
-                case ArrowTypeId.UInt64:
-                    return new UInt64Array(data);
-                case ArrowTypeId.Int64:
-                    return new Int64Array(data);
-                case ArrowTypeId.Float:
-                    return new FloatArray(data);
-                case ArrowTypeId.Double:
-                    return new DoubleArray(data);
-                case ArrowTypeId.String:
-                    return new StringArray(data);
-                case ArrowTypeId.Binary:
-                    return new BinaryArray(data);
-                case ArrowTypeId.Timestamp:
-                    return new TimestampArray(data);
-                case ArrowTypeId.List:
-                    return new ListArray(data);
-                case ArrowTypeId.Struct:
-                    return new StructArray(data);
-                case ArrowTypeId.Union:
-                    return new UnionArray(data);
-                case ArrowTypeId.Date64:
-                    return new Date64Array(data);
-                case ArrowTypeId.Date32:
-                    return new Date32Array(data);
-                case ArrowTypeId.Decimal128:
-                    return new Decimal128Array(data);
-                case ArrowTypeId.Decimal256:
-                    return new Decimal256Array(data);
-                case ArrowTypeId.Dictionary:
-                case ArrowTypeId.FixedSizedBinary:
-                case ArrowTypeId.HalfFloat:
-                case ArrowTypeId.Interval:
-                case ArrowTypeId.Map:
-                case ArrowTypeId.Time32:
-                case ArrowTypeId.Time64:
-                default:
-                    throw new NotSupportedException($"An ArrowArray cannot be built for type {data.DataType.TypeId}.");
-            }
-        }
-    }
-}
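
BuildArray is the public counterpart, mapping populated ArrayData back to a concrete array type; Array.Slice above relies on it to rewrap sliced data. A short sketch using only APIs that appear in this diff:

    using Apache.Arrow;

    // The factory picks Int32Array because data.DataType.TypeId is Int32.
    Int32Array original = new Int32Array.Builder().AppendRange(new[] { 1, 2, 3 }).Build();
    IArrowArray rebuilt = ArrowArrayFactory.BuildArray(original.Data);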
diff --git a/csharp/src/Apache.Arrow/Arrays/ArrowArrayVisitor.cs b/csharp/src/Apache.Arrow/Arrays/ArrowArrayVisitor.cs
deleted file mode 100644
index fc56b66..0000000
--- a/csharp/src/Apache.Arrow/Arrays/ArrowArrayVisitor.cs
+++ /dev/null
@@ -1,22 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-namespace Apache.Arrow
-{
-    public abstract class ArrowArrayVisitor : IArrowArrayVisitor
-    {
-        public virtual void Visit(IArrowArray array) { }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/BinaryArray.cs b/csharp/src/Apache.Arrow/Arrays/BinaryArray.cs
deleted file mode 100644
index 4fd8059..0000000
--- a/csharp/src/Apache.Arrow/Arrays/BinaryArray.cs
+++ /dev/null
@@ -1,358 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-using System.Collections.Generic;
-using System.Runtime.CompilerServices;
-using Apache.Arrow.Memory;
-
-namespace Apache.Arrow
-{
-    public class BinaryArray : Array
-    {
-        public class Builder : BuilderBase<BinaryArray, Builder>
-        {
-            public Builder() : base(BinaryType.Default) { }
-            public Builder(IArrowType dataType) : base(dataType) { }
-
-            protected override BinaryArray Build(ArrayData data)
-            {
-                return new BinaryArray(data);
-            }
-        }
-
-        public BinaryArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Binary);
-            data.EnsureBufferCount(3);
-        }
-
-        public BinaryArray(ArrowTypeId typeId, ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(typeId);
-            data.EnsureBufferCount(3);
-        }
-
-        public abstract class BuilderBase<TArray, TBuilder> : IArrowArrayBuilder<byte, TArray, TBuilder>
-            where TArray : IArrowArray
-            where TBuilder : class, IArrowArrayBuilder<byte, TArray, TBuilder>
-        {
-            protected IArrowType DataType { get; }
-            protected TBuilder Instance => this as TBuilder;
-            protected ArrowBuffer.Builder<int> ValueOffsets { get; }
-            protected ArrowBuffer.Builder<byte> ValueBuffer { get; }
-            protected ArrowBuffer.BitmapBuilder ValidityBuffer { get; }
-            protected int Offset { get; set; }
-            protected int NullCount => this.ValidityBuffer.UnsetBitCount;
-
-            protected BuilderBase(IArrowType dataType)
-            {
-                DataType = dataType;
-                ValueOffsets = new ArrowBuffer.Builder<int>();
-                ValueBuffer = new ArrowBuffer.Builder<byte>();
-                ValidityBuffer = new ArrowBuffer.BitmapBuilder();
-
-                // From the docs:
-                //
-                // The offsets buffer contains length + 1 signed integers (either 32-bit or 64-bit, depending on the
-                // logical type), which encode the start position of each slot in the data buffer. The length of the
-                // value in each slot is computed using the difference between the offset at that slot’s index and the
-                // subsequent offset.
-                //
-                // In this builder, we choose to append the first offset (zero) upon construction, and each trailing
-                // offset is then added after each individual item has been appended.
-                ValueOffsets.Append(this.Offset);
-            }
-
-            protected abstract TArray Build(ArrayData data);
-
-            /// <summary>
-            /// Gets the length of the array built so far.
-            /// </summary>
-            public int Length => ValueOffsets.Length - 1;
-
-            /// <summary>
-            /// Build an Arrow array from the appended contents so far.
-            /// </summary>
-            /// <param name="allocator">Optional memory allocator.</param>
-            /// <returns>Returns an array of type <typeparamref name="TArray"/>.</returns>
-            public TArray Build(MemoryAllocator allocator = default)
-            {
-                var bufs = new[]
-                {
-                    NullCount > 0 ? ValidityBuffer.Build(allocator) : ArrowBuffer.Empty,
-                    ValueOffsets.Build(allocator),
-                    ValueBuffer.Build(allocator),
-                };
-                var data = new ArrayData(
-                    DataType,
-                    length: Length,
-                    NullCount,
-                    offset: 0,
-                    bufs);
-
-                return Build(data);
-            }
-
-            /// <summary>
-            /// Append a single null value to the array.
-            /// </summary>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder AppendNull()
-            {
-                // Do not add to the value buffer in the case of a null.
-                // Note that we do not need to increment the offset as a result.
-                ValidityBuffer.Append(false);
-                ValueOffsets.Append(Offset);
-                return Instance;
-            }
-
-            /// <summary>
-            /// Appends a value, consisting of a single byte, to the array.
-            /// </summary>
-            /// <param name="value">Byte value to append.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder Append(byte value)
-            {
-                ValueBuffer.Append(value);
-                ValidityBuffer.Append(true);
-                Offset++;
-                ValueOffsets.Append(Offset);
-                return Instance;
-            }
-
-            /// <summary>
-            /// Append a value, consisting of a span of bytes, to the array.
-            /// </summary>
-            /// <remarks>
-            /// Note that a single value is added, which consists of arbitrarily many bytes.  If multiple values are
-            /// to be added, use the <see cref="AppendRange"/> method.
-            /// </remarks>
-            /// <param name="span">Span of bytes to add.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder Append(ReadOnlySpan<byte> span)
-            {
-                ValueBuffer.Append(span);
-                ValidityBuffer.Append(true);
-                Offset += span.Length;
-                ValueOffsets.Append(Offset);
-                return Instance;
-            }
-
-            /// <summary>
-            /// Append a value, consisting of an enumerable collection of bytes, to the array.
-            /// </summary>
-            /// <remarks>
-            /// Note that this method appends a single value, which may consist of arbitrarily many bytes.  If multiple
-            /// values are to be added, use the <see cref="AppendRange(IEnumerable{byte})"/> method instead.
-            /// </remarks>
-            /// <param name="value">Enumerable collection of bytes to add.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder Append(IEnumerable<byte> value)
-            {
-                if (value == null)
-                {
-                    return AppendNull();
-                }
-
-                // Note: by looking at the length of the value buffer before and after, we avoid having to iterate
-                // through the enumerable multiple times to get both length and contents.
-                int priorLength = ValueBuffer.Length;
-                ValueBuffer.AppendRange(value);
-                int valueLength = ValueBuffer.Length - priorLength;
-                Offset += valueLength;
-                ValidityBuffer.Append(true);
-                ValueOffsets.Append(Offset);
-                return Instance;
-            }
-
-            /// <summary>
-            /// Append an enumerable collection of single-byte values to the array.
-            /// </summary>
-            /// <remarks>
-            /// Note that this method appends multiple values, each of which is a single byte.  If a single value is
-            /// to be added, use the <see cref="Append(IEnumerable{byte})"/> method instead.
-            /// </remarks>
-            /// <param name="values">Single-byte values to add.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder AppendRange(IEnumerable<byte> values)
-            {
-                if (values == null)
-                {
-                    throw new ArgumentNullException(nameof(values));
-                }
-
-                foreach (byte b in values)
-                {
-                    Append(b);
-                }
-
-                return Instance;
-            }
-
-            /// <summary>
-            /// Append an enumerable collection of values to the array.
-            /// </summary>
-            /// <param name="values">Values to add.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder AppendRange(IEnumerable<byte[]> values)
-            {
-                if (values == null)
-                {
-                    throw new ArgumentNullException(nameof(values));
-                }
-
-                foreach (byte[] arr in values)
-                {
-                    if (arr == null)
-                    {
-                        AppendNull();
-                    }
-                    else
-                    {
-                        Append((ReadOnlySpan<byte>)arr);
-                    }
-                }
-
-                return Instance;
-            }
-
-            public TBuilder Reserve(int capacity)
-            {
-                // TODO: [ARROW-9366] Reserve capacity in the value buffer in a more sensible way.
-                ValueOffsets.Reserve(capacity + 1);
-                ValueBuffer.Reserve(capacity);
-                ValidityBuffer.Reserve(capacity + 1);
-                return Instance;
-            }
-
-            public TBuilder Resize(int length)
-            {
-                // TODO: [ARROW-9366] Resize the value buffer to a safe length based on offsets, not `length`.
-                ValueOffsets.Resize(length + 1);
-                ValueBuffer.Resize(length);
-                ValidityBuffer.Resize(length + 1);
-                return Instance;
-            }
-
-            public TBuilder Swap(int i, int j)
-            {
-                // TODO: Implement
-                throw new NotImplementedException();
-            }
-
-            public TBuilder Set(int index, byte value)
-            {
-                // TODO: Implement
-                throw new NotImplementedException();
-            }
-
-            /// <summary>
-            /// Clear all contents appended so far.
-            /// </summary>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public TBuilder Clear()
-            {
-                ValueOffsets.Clear();
-                ValueBuffer.Clear();
-                ValidityBuffer.Clear();
-
-                // Always write the first offset before anything has been written.
-                Offset = 0;
-                ValueOffsets.Append(Offset);
-                return Instance;
-            }
-        }
-
-        public BinaryArray(IArrowType dataType, int length,
-            ArrowBuffer valueOffsetsBuffer,
-            ArrowBuffer dataBuffer,
-            ArrowBuffer nullBitmapBuffer,
-            int nullCount = 0, int offset = 0)
-        : this(new ArrayData(dataType, length, nullCount, offset,
-            new[] { nullBitmapBuffer, valueOffsetsBuffer, dataBuffer }))
-        { }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        public ArrowBuffer ValueOffsetsBuffer => Data.Buffers[1];
-
-        public ArrowBuffer ValueBuffer => Data.Buffers[2];
-
-        public ReadOnlySpan<int> ValueOffsets => ValueOffsetsBuffer.Span.CastTo<int>().Slice(Offset, Length + 1);
-
-        public ReadOnlySpan<byte> Values => ValueBuffer.Span.CastTo<byte>();
-
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        [Obsolete("This method has been deprecated. Please use ValueOffsets[index] instead.")]
-        public int GetValueOffset(int index)
-        {
-            if (index < 0 || index > Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-            return ValueOffsets[index];
-        }
-
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        public int GetValueLength(int index)
-        {
-            if (index < 0 || index >= Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-            if (!IsValid(index))
-            {
-                return 0;
-            }
-
-            ReadOnlySpan<int> offsets = ValueOffsets;
-            return offsets[index + 1] - offsets[index];
-        }
-
-        /// <summary>
-        /// Get the collection of bytes, as a read-only span, at a given index in the array.
-        /// </summary>
-        /// <remarks>
-        /// Note that this method cannot reliably identify null values, which are indistinguishable from empty byte
-        /// collection values when seen in the context of this method's return type of <see cref="ReadOnlySpan{Byte}"/>.
-        /// Use the <see cref="Array.IsNull"/> method instead to reliably determine null values.
-        /// </remarks>
-        /// <param name="index">Index at which to get bytes.</param>
-        /// <returns>Returns a <see cref="ReadOnlySpan{Byte}"/> object.</returns>
-        /// <exception cref="ArgumentOutOfRangeException">If the index is negative or beyond the length of the array.
-        /// </exception>
-        public ReadOnlySpan<byte> GetBytes(int index)
-        {
-            if (index < 0 || index >= Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-
-            if (IsNull(index))
-            {
-                // Note that `return null;` is valid syntax, but would be misleading as `null` in the context of a span
-                // is actually returned as an empty span.
-                return ReadOnlySpan<byte>.Empty;
-            }
-
-            return ValueBuffer.Span.Slice(ValueOffsets[index], GetValueLength(index));
-        }
-
-    }
-}
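
A worked example of the offsets convention described in the builder comment above, using the builder as defined in this file; appending "a", a null, then "bcd" yields length + 1 = 4 offsets.

    using Apache.Arrow;

    var array = new BinaryArray.Builder()
        .Append(new byte[] { 0x61 })               // "a"
        .AppendNull()                              // adds no bytes, repeats the offset
        .Append(new byte[] { 0x62, 0x63, 0x64 })   // "bcd"
        .Build();

    // value buffer:   61 62 63 64
    // offsets buffer: 0, 1, 1, 4      (Length + 1 entries; the first is appended up front)
    // validity:       1, 0, 1
    // GetValueLength(i) = offsets[i + 1] - offsets[i]  ->  1, 0, 3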
diff --git a/csharp/src/Apache.Arrow/Arrays/BooleanArray.cs b/csharp/src/Apache.Arrow/Arrays/BooleanArray.cs
deleted file mode 100644
index 0915338..0000000
--- a/csharp/src/Apache.Arrow/Arrays/BooleanArray.cs
+++ /dev/null
@@ -1,194 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Memory;
-using Apache.Arrow.Types;
-using System;
-using System.Collections.Generic;
-
-namespace Apache.Arrow
-{
-    public class BooleanArray : Array
-    {
-        public class Builder : IArrowArrayBuilder<bool, BooleanArray, Builder>
-        {
-            private ArrowBuffer.BitmapBuilder ValueBuffer { get; }
-            private ArrowBuffer.BitmapBuilder ValidityBuffer { get; }
-
-            public int Length => ValueBuffer.Length;
-            public int Capacity => ValueBuffer.Capacity;
-            public int NullCount => ValidityBuffer.UnsetBitCount;
-
-            public Builder()
-            {
-                ValueBuffer = new ArrowBuffer.BitmapBuilder();
-                ValidityBuffer = new ArrowBuffer.BitmapBuilder();
-            }
-
-            public Builder Append(bool value)
-            {
-                return NullableAppend(value);
-            }
-
-            public Builder NullableAppend(bool? value)
-            {
-                // Note that we rely on the fact that null values are false in the value buffer.
-                ValueBuffer.Append(value ?? false);
-                ValidityBuffer.Append(value.HasValue);
-                return this;
-            }
-
-            public Builder Append(ReadOnlySpan<bool> span)
-            {
-                foreach (bool value in span)
-                {
-                    Append(value);
-                }
-                return this;
-            }
-
-            public Builder AppendRange(IEnumerable<bool> values)
-            {
-                foreach (bool value in values)
-                {
-                    Append(value);
-                }
-                return this;
-            }
-
-            public Builder AppendNull()
-            {
-                return NullableAppend(null);
-            }
-
-            public BooleanArray Build(MemoryAllocator allocator = default)
-            {
-                ArrowBuffer validityBuffer = NullCount > 0
-                                        ? ValidityBuffer.Build(allocator)
-                                        : ArrowBuffer.Empty;
-
-                return new BooleanArray(
-                    ValueBuffer.Build(allocator), validityBuffer,
-                    Length, NullCount, 0);
-            }
-
-            public Builder Clear()
-            {
-                ValueBuffer.Clear();
-                ValidityBuffer.Clear();
-                return this;
-            }
-
-            public Builder Reserve(int capacity)
-            {
-                if (capacity < 0)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(capacity));
-                }
-
-                ValueBuffer.Reserve(capacity);
-                ValidityBuffer.Reserve(capacity);
-                return this;
-            }
-
-            public Builder Resize(int length)
-            {
-                if (length < 0)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(length));
-                }
-
-                ValueBuffer.Resize(length);
-                ValidityBuffer.Resize(length);
-                return this;
-            }
-
-            public Builder Toggle(int index)
-            {
-                CheckIndex(index);
-
-                // If there is a null at this index, assume it was set to false in the value buffer, and so becomes
-                // true/non-null after toggling.
-                ValueBuffer.Toggle(index);
-                ValidityBuffer.Set(index);
-                return this;
-            }
-
-            public Builder Set(int index)
-            {
-                CheckIndex(index);
-                ValueBuffer.Set(index);
-                ValidityBuffer.Set(index);
-                return this;
-            }
-
-            public Builder Set(int index, bool value)
-            {
-                CheckIndex(index);
-                ValueBuffer.Set(index, value);
-                ValidityBuffer.Set(index);
-                return this;
-            }
-
-            public Builder Swap(int i, int j)
-            {
-                CheckIndex(i);
-                CheckIndex(j);
-                ValueBuffer.Swap(i, j);
-                ValidityBuffer.Swap(i, j);
-                return this;
-            }
-
-            private void CheckIndex(int index)
-            {
-                if (index < 0 || index >= Length)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(index));
-                }
-            }
-        }
-
-        public ArrowBuffer ValueBuffer => Data.Buffers[1];
-        public ReadOnlySpan<byte> Values => ValueBuffer.Span.Slice(0, (int) Math.Ceiling(Length / 8.0));
-
-        public BooleanArray(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(BooleanType.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public BooleanArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Boolean);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        [Obsolete("GetBoolean does not support null values. Use GetValue instead (which this method invokes internally).")]
-        public bool GetBoolean(int index)
-        {
-            return GetValue(index).GetValueOrDefault();
-        }
-
-        public bool? GetValue(int index)
-        {
-            return IsNull(index)
-                ? (bool?)null
-                : BitUtility.GetBit(ValueBuffer.Span, index + Offset);
-        }
-    }
-}
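
A short sketch of the null semantics above: a null occupies a cleared bit in the bit-packed value buffer, so only GetValue, which consults the validity bitmap, can distinguish null from false.

    using Apache.Arrow;

    var array = new BooleanArray.Builder()
        .Append(true)
        .AppendNull()     // stored as false in the value buffer, invalid in the bitmap
        .Append(false)
        .Build();

    bool? a = array.GetValue(0);   // true
    bool? b = array.GetValue(1);   // null
    bool? c = array.GetValue(2);   // false
    // array.Values spans ceil(3 / 8.0) = 1 byte of bit-packed values.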
diff --git a/csharp/src/Apache.Arrow/Arrays/Date32Array.cs b/csharp/src/Apache.Arrow/Arrays/Date32Array.cs
deleted file mode 100644
index 35c0065..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Date32Array.cs
+++ /dev/null
@@ -1,112 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// The <see cref="Date32Array"/> class holds an array of dates in the <c>Date32</c> format, where each date is
-    /// stored as the number of days since the UNIX epoch (midnight, 1970-01-01).
-    /// </summary>
-    public class Date32Array : PrimitiveArray<int>
-    {
-        private static readonly DateTime _epochDate = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Unspecified);
-
-        /// <summary>
-        /// The <see cref="Builder"/> class can be used to fluently build <see cref="Date32Array"/> objects.
-        /// </summary>
-        public class Builder : DateArrayBuilder<int, Date32Array, Builder>
-        {
-            private class DateBuilder : PrimitiveArrayBuilder<int, Date32Array, DateBuilder>
-            {
-                protected override Date32Array Build(
-                    ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                    int length, int nullCount, int offset) =>
-                    new Date32Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-            }
-
-            /// <summary>
-            /// Construct a new instance of the <see cref="Builder"/> class.
-            /// </summary>
-            public Builder() : base(new DateBuilder()) { }
-
-            protected override int Convert(DateTime dateTime)
-            {
-                return (int)(dateTime.Date - _epochDate).TotalDays;
-            }
-
-            protected override int Convert(DateTimeOffset dateTimeOffset)
-            {
-                // The internal value stored for a DateTimeOffset can be thought of as the number of 24-hour "blocks"
-                // of time that have elapsed since the UNIX epoch.  This is the same as converting it to UTC first and
-                // then taking the date element from that.  It is not the same as what would result from looking at the
-                // DateTimeOffset.Date property.
-                return (int)(dateTimeOffset.UtcDateTime.Date - _epochDate).TotalDays;
-            }
-        }
-
-        public Date32Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(Date32Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public Date32Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Date32);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        [Obsolete("Use `GetDateTimeOffset()` instead")]
-        public DateTimeOffset? GetDate(int index) => GetDateTimeOffset(index);
-
-        /// <summary>
-        /// Get the date at the specified index in the form of a <see cref="DateTime"/> object.
-        /// </summary>
-        /// <remarks>
-        /// The <see cref="DateTime.Kind"/> property of the returned object is set to
-        /// <see cref="DateTimeKind.Unspecified"/>.
-        /// </remarks>
-        /// <param name="index">Index at which to get the date.</param>
-        /// <returns>Returns a <see cref="DateTime"/> object, or <c>null</c> if there is no object at that index.
-        /// </returns>
-        public DateTime? GetDateTime(int index)
-        {
-            int? value = GetValue(index);
-            return value.HasValue
-                ? _epochDate.AddDays(value.Value)
-                : default(DateTime?);
-        }
-
-        /// <summary>
-        /// Get the date at the specified index in the form of a <see cref="DateTimeOffset"/> object.
-        /// </summary>
-        /// <param name="index">Index at which to get the date.</param>
-        /// <returns>Returns a <see cref="DateTimeOffset"/> object, or <c>null</c> if there is no object at that index.
-        /// </returns>
-        public DateTimeOffset? GetDateTimeOffset(int index)
-        {
-            int? value = GetValue(index);
-            return value.HasValue
-                ? new DateTimeOffset(_epochDate.AddDays(value.Value), TimeSpan.Zero)
-                : default(DateTimeOffset?);
-        }
-    }
-}
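
A worked example of the Convert(DateTimeOffset) logic above: the encoded day count comes from the offset-adjusted UTC date, which can differ from the DateTimeOffset.Date property.

    using System;

    var dto = new DateTimeOffset(1970, 1, 1, 23, 0, 0, TimeSpan.FromHours(-2));
    // dto.Date             == 1970-01-01   (local calendar date)
    // dto.UtcDateTime.Date == 1970-01-02   (23:00 at -02:00 is 01:00 UTC the next day)
    // Encoded Date32 value: (1970-01-02 - 1970-01-01).TotalDays = 1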
diff --git a/csharp/src/Apache.Arrow/Arrays/Date64Array.cs b/csharp/src/Apache.Arrow/Arrays/Date64Array.cs
deleted file mode 100644
index cf977b2..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Date64Array.cs
+++ /dev/null
@@ -1,117 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// The <see cref="Date64Array"/> class holds an array of dates in the <c>Date64</c> format, where each date is
-    /// stored as the number of milliseconds since the UNIX epoch (midnight, 1970-01-01), excluding leap seconds, in multiples of
-    /// 86400000.
-    /// </summary>
-    public class Date64Array : PrimitiveArray<long>
-    {
-        private const long MillisecondsPerDay = 86400000;
-
-        public Date64Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(Date64Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        /// <summary>
-        /// The <see cref="Builder"/> class can be used to fluently build <see cref="Date64Array"/> objects.
-        /// </summary>
-        public class Builder : DateArrayBuilder<long, Date64Array, Builder>
-        {
-            private class DateBuilder : PrimitiveArrayBuilder<long, Date64Array, DateBuilder>
-            {
-                protected override Date64Array Build(
-                    ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                    int length, int nullCount, int offset) =>
-                    new Date64Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-            }
-
-            /// <summary>
-            /// Construct a new instance of the <see cref="Builder"/> class.
-            /// </summary>
-            public Builder() : base(new DateBuilder()) { }
-
-            protected override long Convert(DateTime dateTime)
-            {
-                var dateTimeOffset = new DateTimeOffset(
-                    DateTime.SpecifyKind(dateTime.Date, DateTimeKind.Unspecified),
-                    TimeSpan.Zero);
-                return dateTimeOffset.ToUnixTimeMilliseconds();
-            }
-
-            protected override long Convert(DateTimeOffset dateTimeOffset)
-            {
-                // The value stored in the array for a DateTimeOffset is the number of milliseconds, in multiples
-                // of 86400000, since the UNIX epoch, computed from the UTC instant.  It is not necessarily the
-                // same as what would result from encoding the date from the DateTimeOffset.Date property.
-                long millis = dateTimeOffset.ToUnixTimeMilliseconds();
-                long days = millis / MillisecondsPerDay;
-                // Floor toward negative infinity (C# division truncates toward zero) so pre-epoch instants land
-                // on the correct day; exact negative multiples of a day must not be decremented again.
-                return (millis < 0 && millis % MillisecondsPerDay != 0 ? days - 1 : days) * MillisecondsPerDay;
-            }
-        }
-
-        public Date64Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Date64);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        [Obsolete("Use `GetDateTimeOffset()` instead")]
-        public DateTimeOffset? GetDate(int index) => GetDateTimeOffset(index);
-
-        /// <summary>
-        /// Get the date at the specified index in the form of a <see cref="DateTime"/> object.
-        /// </summary>
-        /// <remarks>
-        /// The <see cref="DateTime.Kind"/> property of the returned object is set to
-        /// <see cref="DateTimeKind.Unspecified"/>.
-        /// </remarks>
-        /// <param name="index">Index at which to get the date.</param>
-        /// <returns>Returns a <see cref="DateTime"/> object, or <c>null</c> if there is no object at that index.
-        /// </returns>
-        public DateTime? GetDateTime(int index)
-        {
-            long? value = GetValue(index);
-            return value.HasValue
-                ? DateTimeOffset.FromUnixTimeMilliseconds(value.Value).Date
-                : default(DateTime?);
-        }
-
-        /// <summary>
-        /// Get the date at the specified index in the form of a <see cref="DateTimeOffset"/> object.
-        /// </summary>
-        /// <param name="index">Index at which to get the date.</param>
-        /// <returns>Returns a <see cref="DateTimeOffset"/> object, or <c>null</c> if there is no object at that index.
-        /// </returns>
-        public DateTimeOffset? GetDateTimeOffset(int index)
-        {
-            long? value = GetValue(index);
-            return value.HasValue
-                ? DateTimeOffset.FromUnixTimeMilliseconds(value.Value)
-                : default(DateTimeOffset?);
-        }
-    }
-}
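For reference, a minimal sketch of how the Date64Array above was built and read back; the sample dates are illustrative only:

    using System;
    using Apache.Arrow;

    Date64Array array = new Date64Array.Builder()
        .Append(new DateTime(2021, 4, 18))                                        // stored as whole days in ms
        .Append(new DateTimeOffset(2021, 4, 18, 0, 30, 0, TimeSpan.FromHours(2))) // UTC instant is 2021-04-17T22:30
        .AppendNull()
        .Build();

    DateTime? first = array.GetDateTime(0);              // 2021-04-18, Kind = Unspecified
    DateTimeOffset? second = array.GetDateTimeOffset(1); // 2021-04-17T00:00:00+00:00
    bool isNull = array.IsNull(2);                       // true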
diff --git a/csharp/src/Apache.Arrow/Arrays/DateArrayBuilder.cs b/csharp/src/Apache.Arrow/Arrays/DateArrayBuilder.cs
deleted file mode 100644
index 4e69f6f..0000000
--- a/csharp/src/Apache.Arrow/Arrays/DateArrayBuilder.cs
+++ /dev/null
@@ -1,209 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// The <see cref="DateArrayBuilder{TUnderlying,TArray,TBuilder}"/> class is an abstract array builder that can
-    /// accept dates in the form of <see cref="DateTime"/> or <see cref="DateTimeOffset"/> and convert to some
-    /// underlying date representation.
-    /// </summary>
-    public abstract class DateArrayBuilder<TUnderlying, TArray, TBuilder> :
-        DelegatingArrayBuilder<TUnderlying, TArray, TBuilder>,
-        IArrowArrayBuilder<DateTime, TArray, TBuilder>,
-        IArrowArrayBuilder<DateTimeOffset, TArray, TBuilder>
-        where TArray : IArrowArray
-        where TBuilder : class, IArrowArrayBuilder<TArray>
-    {
-        /// <summary>
-        /// Construct a new instance of the <see cref="DateArrayBuilder{TUnderlying,TArray,TBuilder}"/> class.
-        /// </summary>
-        /// <param name="innerBuilder">Inner builder that will produce arrays of type <typeparamref name="TArray"/>.
-        /// </param>
-        protected DateArrayBuilder(IArrowArrayBuilder<TUnderlying, TArray, IArrowArrayBuilder<TArray>> innerBuilder)
-            : base(innerBuilder)
-        { }
-
-        /// <summary>
-        /// Append a date in the form of a <see cref="DateTime"/> object to the array.
-        /// </summary>
-        /// <remarks>
-        /// The value of <see cref="DateTime.Kind"/> on the input does not have any effect on the behaviour of this
-        /// method.
-        /// </remarks>
-        /// <param name="value">Date to add.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Append(DateTime value)
-        {
-            InnerBuilder.Append(Convert(value));
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Append a date from a <see cref="DateTimeOffset"/> object to the array.
-        /// </summary>
-        /// <remarks>
-        /// Note that to convert the supplied <paramref name="value"/> parameter to a date, it is first converted to
-        /// UTC and the date then taken from the UTC date/time.  Depending on the value of its
-        /// <see cref="DateTimeOffset.Offset"/> property, this may not necessarily be the same as the date obtained by
-        /// calling its <see cref="DateTimeOffset.Date"/> property.
-        /// </remarks>
-        /// <param name="value">Date to add.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Append(DateTimeOffset value)
-        {
-            InnerBuilder.Append(Convert(value));
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Append a span of dates in the form of <see cref="DateTime"/> objects to the array.
-        /// </summary>
-        /// <remarks>
-        /// The value of <see cref="DateTime.Kind"/> on any of the inputs does not have any effect on the behaviour of
-        /// this method.
-        /// </remarks>
-        /// <param name="span">Span of dates to add.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Append(ReadOnlySpan<DateTime> span)
-        {
-            InnerBuilder.Reserve(span.Length);
-            foreach (var item in span)
-            {
-                InnerBuilder.Append(Convert(item));
-            }
-
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Append a span of dates in the form of <see cref="DateTimeOffset"/> objects to the array.
-        /// </summary>
-        /// <remarks>
-        /// Note that to convert the <see cref="DateTimeOffset"/> objects in the <paramref name="span"/> parameter to
-        /// dates, they are first converted to UTC and the date then taken from the UTC date/times.  Depending on the
-        /// value of each <see cref="DateTimeOffset.Offset"/> property, this may not necessarily be the same as the
-        /// date obtained by calling the <see cref="DateTimeOffset.Date"/> property.
-        /// </remarks>
-        /// <param name="span">Span of dates to add.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Append(ReadOnlySpan<DateTimeOffset> span)
-        {
-            InnerBuilder.Reserve(span.Length);
-            foreach (var item in span)
-            {
-                InnerBuilder.Append(Convert(item));
-            }
-
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Append a null date to the array.
-        /// </summary>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder AppendNull()
-        {
-            InnerBuilder.AppendNull();
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Append a collection of dates in the form of <see cref="DateTime"/> objects to the array.
-        /// </summary>
-        /// <remarks>
-        /// The value of <see cref="DateTime.Kind"/> on any of the inputs does not have any effect on the behaviour of
-        /// this method.
-        /// </remarks>
-        /// <param name="values">Collection of dates to add.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder AppendRange(IEnumerable<DateTime> values)
-        {
-            InnerBuilder.AppendRange(values.Select(Convert));
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Append a collection of dates in the form of <see cref="DateTimeOffset"/> objects to the array.
-        /// </summary>
-        /// <remarks>
-        /// Note that to convert the <see cref="DateTimeOffset"/> objects in the <paramref name="values"/> parameter to
-        /// dates, they are first converted to UTC and the date then taken from the UTC date/times.  Depending on the
-        /// value of each <see cref="DateTimeOffset.Offset"/> property, this may not necessarily be the same as the
-        /// date obtained by calling the <see cref="DateTimeOffset.Date"/> property.
-        /// </remarks>
-        /// <param name="values">Collection of dates to add.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder AppendRange(IEnumerable<DateTimeOffset> values)
-        {
-            InnerBuilder.AppendRange(values.Select(Convert));
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Set the value of a date in the form of a <see cref="DateTime"/> object at the specified index.
-        /// </summary>
-        /// <remarks>
-        /// The value of <see cref="DateTime.Kind"/> on the input does not have any effect on the behaviour of this
-        /// method.
-        /// </remarks>
-        /// <param name="index">Index at which to set value.</param>
-        /// <param name="value">Date to set.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Set(int index, DateTime value)
-        {
-            InnerBuilder.Set(index, Convert(value));
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Set the value of a date in the form of a <see cref="DateTimeOffset"/> object at the specified index.
-        /// </summary>
-        /// <remarks>
-        /// Note that to convert the supplied <paramref name="value"/> parameter to a date, it is first converted to
-        /// UTC and the date then taken from the UTC date/time.  Depending on the value of its
-        /// <see cref="DateTimeOffset.Offset"/> property, this may not necessarily be the same as the date obtained by
-        /// calling its <see cref="DateTimeOffset.Date"/> property.
-        /// </remarks>
-        /// <param name="index">Index at which to set value.</param>
-        /// <param name="value">Date to set.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Set(int index, DateTimeOffset value)
-        {
-            InnerBuilder.Set(index, Convert(value));
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Swap the values of the dates at the specified indices.
-        /// </summary>
-        /// <param name="i">First index.</param>
-        /// <param name="j">Second index.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Swap(int i, int j)
-        {
-            InnerBuilder.Swap(i, j);
-            return this as TBuilder;
-        }
-
-        protected abstract TUnderlying Convert(DateTime dateTime);
-
-        protected abstract TUnderlying Convert(DateTimeOffset dateTimeOffset);
-    }
-}
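The UTC-based semantics called out in the remarks above are easiest to see near an offset boundary. A minimal sketch (top-level statements; values illustrative):

    using System;
    using Apache.Arrow;

    var value = new DateTimeOffset(2020, 1, 1, 0, 30, 0, TimeSpan.FromHours(2));
    // value.Date is 2020-01-01, but the same instant in UTC is 2019-12-31T22:30.

    var array = new Date64Array.Builder().Append(value).Build();
    Console.WriteLine(array.GetDateTime(0)); // 2019-12-31, not 2020-01-01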
diff --git a/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs b/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs
deleted file mode 100644
index 128e9e5..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs
+++ /dev/null
@@ -1,95 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Numerics;
-using Apache.Arrow.Arrays;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class Decimal128Array : FixedSizeBinaryArray
-    {
-        public class Builder : BuilderBase<Decimal128Array, Builder>
-        {
-            public Builder(Decimal128Type type) : base(type, 16)
-            {
-                DataType = type;
-            }
-
-            protected new Decimal128Type DataType { get; }
-
-            protected override Decimal128Array Build(ArrayData data)
-            {
-                return new Decimal128Array(data);
-            }
-
-            public Builder Append(decimal value)
-            {
-                Span<byte> bytes = stackalloc byte[DataType.ByteWidth];
-                DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, DataType.ByteWidth, bytes);
-
-                return Append(bytes);
-            }
-
-            public Builder AppendRange(IEnumerable<decimal> values)
-            {
-                if (values == null)
-                {
-                    throw new ArgumentNullException(nameof(values));
-                }
-
-                foreach (decimal d in values)
-                {
-                    Append(d);
-                }
-
-                return Instance;
-            }
-
-            public Builder Set(int index, decimal value)
-            {
-                Span<byte> bytes = stackalloc byte[DataType.ByteWidth];
-                DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, DataType.ByteWidth, bytes);
-
-                return Set(index, bytes);
-            }
-        }
-
-        public Decimal128Array(ArrayData data)
-            : base(ArrowTypeId.Decimal128, data)
-        {
-            data.EnsureDataType(ArrowTypeId.Decimal128);
-            data.EnsureBufferCount(2);
-            Debug.Assert(Data.DataType is Decimal128Type);
-        }
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        public int Scale => ((Decimal128Type)Data.DataType).Scale;
-        public int Precision => ((Decimal128Type)Data.DataType).Precision;
-        public int ByteWidth => ((Decimal128Type)Data.DataType).ByteWidth;
-
-        public decimal? GetValue(int index)
-        {
-            if (IsNull(index))
-            {
-                return null;
-            }
-            return DecimalUtility.GetDecimal(ValueBuffer, index, Scale, ByteWidth);
-        }
-    }
-}
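A minimal usage sketch for the builder above, assuming Decimal128Type exposes a (precision, scale) constructor as elsewhere in the library:

    using Apache.Arrow;
    using Apache.Arrow.Types;

    // Assumption: Decimal128Type(precision, scale) constructor.
    var array = new Decimal128Array.Builder(new Decimal128Type(20, 2))
        .Append(123.45m)
        .AppendNull()
        .Build();

    decimal? first = array.GetValue(0);  // 123.45m
    decimal? second = array.GetValue(1); // null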
diff --git a/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs b/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs
deleted file mode 100644
index fb4cd6b..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs
+++ /dev/null
@@ -1,96 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Numerics;
-using Apache.Arrow.Arrays;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class Decimal256Array : FixedSizeBinaryArray
-    {
-        public class Builder : BuilderBase<Decimal256Array, Builder>
-        {
-            public Builder(Decimal256Type type) : base(type, 32)
-            {
-                DataType = type;
-            }
-
-            protected new Decimal256Type DataType { get; }
-
-            protected override Decimal256Array Build(ArrayData data)
-            {
-                return new Decimal256Array(data);
-            }
-
-            public Builder Append(decimal value)
-            {
-                Span<byte> bytes = stackalloc byte[DataType.ByteWidth];
-                DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, DataType.ByteWidth, bytes);
-
-                return Append(bytes);
-            }
-
-            public Builder AppendRange(IEnumerable<decimal> values)
-            {
-                if (values == null)
-                {
-                    throw new ArgumentNullException(nameof(values));
-                }
-
-                foreach (decimal d in values)
-                {
-                    Append(d);
-                }
-
-                return Instance;
-            }
-
-            public Builder Set(int index, decimal value)
-            {
-                Span<byte> bytes = stackalloc byte[DataType.ByteWidth];
-                DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, DataType.ByteWidth, bytes);
-
-                return Set(index, bytes);
-            }
-        }
-
-        public Decimal256Array(ArrayData data)
-            : base(ArrowTypeId.Decimal256, data)
-        {
-            data.EnsureDataType(ArrowTypeId.Decimal256);
-            data.EnsureBufferCount(2);
-            Debug.Assert(Data.DataType is Decimal256Type);
-        }
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        public int Scale => ((Decimal256Type)Data.DataType).Scale;
-        public int Precision => ((Decimal256Type)Data.DataType).Precision;
-        public int ByteWidth => ((Decimal256Type)Data.DataType).ByteWidth;
-
-        public decimal? GetValue(int index)
-        {
-            if (IsNull(index))
-            {
-                return null;
-            }
-
-            return DecimalUtility.GetDecimal(ValueBuffer, index, Scale, ByteWidth);
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/DelegatingArrayBuilder.cs b/csharp/src/Apache.Arrow/Arrays/DelegatingArrayBuilder.cs
deleted file mode 100644
index f2ab3ee..0000000
--- a/csharp/src/Apache.Arrow/Arrays/DelegatingArrayBuilder.cs
+++ /dev/null
@@ -1,102 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using Apache.Arrow.Memory;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// The <see cref="DelegatingArrayBuilder{T,TArray,TBuilder}"/> class can be used as the base for any array builder
-    /// that needs to delegate most of its functionality to an inner array builder.
-    /// </summary>
-    /// <remarks>
-    /// The typical use case is when an array builder may accept a number of different types as input, but which are
-    /// all internally converted to a single type for assembly into an array.
-    /// </remarks>
-    /// <typeparam name="T">Type of item accepted by inner array builder.</typeparam>
-    /// <typeparam name="TArray">Type of array produced by this (and the inner) builder.</typeparam>
-    /// <typeparam name="TBuilder">Type of builder (see Curiously-Recurring Template Pattern).</typeparam>
-    public abstract class DelegatingArrayBuilder<T, TArray, TBuilder> : IArrowArrayBuilder<TArray, TBuilder>
-        where TArray : IArrowArray
-        where TBuilder : class, IArrowArrayBuilder<TArray>
-    {
-        /// <summary>
-        /// Gets the inner array builder.
-        /// </summary>
-        protected IArrowArrayBuilder<T, TArray, IArrowArrayBuilder<TArray>> InnerBuilder { get; }
-
-        /// <summary>
-        /// Gets the number of items added to the array so far.
-        /// </summary>
-        public int Length => InnerBuilder.Length;
-
-        /// <summary>
-        /// Construct a new instance of the <see cref="DelegatingArrayBuilder{T,TArray,TBuilder}"/> class.
-        /// </summary>
-        /// <param name="innerBuilder">Inner array builder.</param>
-        protected DelegatingArrayBuilder(IArrowArrayBuilder<T, TArray, IArrowArrayBuilder<TArray>> innerBuilder)
-        {
-            InnerBuilder = innerBuilder ?? throw new ArgumentNullException(nameof(innerBuilder));
-        }
-
-        /// <summary>
-        /// Build an Arrow Array from the appended contents so far.
-        /// </summary>
-        /// <param name="allocator">Optional memory allocator.</param>
-        /// <returns>Returns the built array.</returns>
-        public TArray Build(MemoryAllocator allocator = default) => InnerBuilder.Build(allocator);
-
-        /// <summary>
-        /// Reserve a given number of items' additional capacity.
-        /// </summary>
-        /// <param name="additionalCapacity">Number of items of required additional capacity.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Reserve(int additionalCapacity)
-        {
-            InnerBuilder.Reserve(additionalCapacity);
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Resize the array to a given size.
-        /// </summary>
-        /// <remarks>
-        /// Note that if the required capacity is larger than the current length of the populated array so far,
-        /// the array's contents in the new, expanded region are undefined.
-        /// </remarks>
-        /// <remarks>
-        /// Note that if the required capacity is smaller than the current length of the populated array so far,
-        /// the array will be truncated and items at the end of the array will be lost.
-        /// </remarks>
-        /// <param name="capacity">Number of items of required capacity.</param>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Resize(int capacity)
-        {
-            InnerBuilder.Resize(capacity);
-            return this as TBuilder;
-        }
-
-        /// <summary>
-        /// Clear all contents appended so far.
-        /// </summary>
-        /// <returns>Returns the builder (for fluent-style composition).</returns>
-        public TBuilder Clear()
-        {
-            InnerBuilder.Clear();
-            return this as TBuilder;
-        }
-    }
-}
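To make the delegation pattern concrete, a hypothetical subclass (StringLengthArrayBuilder is illustrative only and does not exist in the library):

    using Apache.Arrow;

    // Hypothetical: accepts strings, converts each to its length, and delegates
    // storage to an inner Int32Array builder, in the same way DateArrayBuilder
    // delegates to its inner date builder.
    public class StringLengthArrayBuilder
        : DelegatingArrayBuilder<int, Int32Array, StringLengthArrayBuilder>
    {
        public StringLengthArrayBuilder()
            : base(new Int32Array.Builder())
        { }

        public StringLengthArrayBuilder Append(string value)
        {
            InnerBuilder.Append(value.Length);
            return this;
        }
    }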
diff --git a/csharp/src/Apache.Arrow/Arrays/DoubleArray.cs b/csharp/src/Apache.Arrow/Arrays/DoubleArray.cs
deleted file mode 100644
index 6450aa1..0000000
--- a/csharp/src/Apache.Arrow/Arrays/DoubleArray.cs
+++ /dev/null
@@ -1,45 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class DoubleArray : PrimitiveArray<double>
-    {
-        public class Builder : PrimitiveArrayBuilder<double, DoubleArray, Builder>
-        {
-            protected override DoubleArray Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new DoubleArray(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public DoubleArray(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(DoubleType.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public DoubleArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Double);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/FixedSizeBinaryArray.cs b/csharp/src/Apache.Arrow/Arrays/FixedSizeBinaryArray.cs
deleted file mode 100644
index c3cf2fc..0000000
--- a/csharp/src/Apache.Arrow/Arrays/FixedSizeBinaryArray.cs
+++ /dev/null
@@ -1,196 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using Apache.Arrow.Memory;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow.Arrays
-{
-    public class FixedSizeBinaryArray : Array
-    {
-        public FixedSizeBinaryArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.FixedSizedBinary);
-            data.EnsureBufferCount(2);
-        }
-
-        public FixedSizeBinaryArray(ArrowTypeId typeId, ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(typeId);
-            data.EnsureBufferCount(2);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        public ArrowBuffer ValueBuffer => Data.Buffers[1];
-
-        public abstract class BuilderBase<TArray, TBuilder> : IArrowArrayBuilder<byte[], TArray, TBuilder>
-            where TArray : IArrowArray
-            where TBuilder : class, IArrowArrayBuilder<byte[], TArray, TBuilder>
-        {
-            protected IArrowType DataType { get; }
-            protected TBuilder Instance => this as TBuilder;
-            protected int ByteWidth { get; }
-            protected ArrowBuffer.Builder<byte> ValueBuffer { get; }
-            protected ArrowBuffer.BitmapBuilder ValidityBuffer { get; }
-            public int Length => ValueBuffer.Length / ByteWidth;
-            protected int NullCount => this.ValidityBuffer.UnsetBitCount;
-            protected abstract TArray Build(ArrayData data);
-
-            protected BuilderBase(IArrowType dataType, int byteWidth)
-            {
-                DataType = dataType;
-                ByteWidth = byteWidth;
-                ValueBuffer = new ArrowBuffer.Builder<byte>();
-                ValidityBuffer = new ArrowBuffer.BitmapBuilder();
-            }
-
-            public TArray Build(MemoryAllocator allocator = default)
-            {
-                var bufs = new[]
-                {
-                    NullCount > 0 ? ValidityBuffer.Build(allocator) : ArrowBuffer.Empty,
-                    ValueBuffer.Build(ByteWidth, allocator),
-                };
-                var data = new ArrayData(
-                    DataType,
-                    Length,
-                    NullCount,
-                    0,
-                    bufs);
-
-                return Build(data);
-            }
-
-            public TBuilder Reserve(int capacity)
-            {
-                ValueBuffer.Reserve(capacity * ByteWidth);
-                ValidityBuffer.Reserve(capacity + 1);
-                return Instance;
-            }
-
-            public TBuilder Resize(int length)
-            {
-                ValueBuffer.Resize(length * ByteWidth);
-                ValidityBuffer.Resize(length + 1);
-                return Instance;
-            }
-
-            public TBuilder Clear()
-            {
-                ValueBuffer.Clear();
-                ValidityBuffer.Clear();
-
-                return Instance;
-            }
-
-            public TBuilder Append(byte[] value)
-            {
-                // A single appended value must be exactly ByteWidth bytes: Append(ReadOnlySpan<byte>) records one
-                // validity bit, so longer input would desynchronize the value and validity buffers.
-                if (value.Length != ByteWidth)
-                    throw new ArgumentOutOfRangeException(nameof(value), $"Length of byte array ({value.Length}) does not match the fixed byte width ({ByteWidth}).");
-                return Append(value.AsSpan());
-            }
-            public TBuilder Append(ReadOnlySpan<byte[]> span)
-            {
-                foreach (var b in span)
-                {
-                    Append(b);
-                }
-
-                return Instance;
-            }
-
-            public TBuilder AppendRange(IEnumerable<byte[]> values)
-            {
-                if (values == null)
-                {
-                    throw new ArgumentNullException(nameof(values));
-                }
-
-                foreach (byte[] b in values)
-                {
-                    Append(b);
-                }
-
-                return Instance;
-            }
-
-            public TBuilder Append(ReadOnlySpan<byte> span)
-            {
-                ValueBuffer.Append(span);
-                ValidityBuffer.Append(true);
-                return Instance;
-            }
-
-            public TBuilder AppendNull()
-            {
-                ValueBuffer.Append(new byte[ByteWidth]);
-                ValidityBuffer.Append(false);
-                return Instance;
-            }
-
-            public TBuilder Swap(int i, int j)
-            {
-                int iStart = i * ByteWidth;
-                int jStart = j * ByteWidth;
-                byte[] iBytes = ValueBuffer.Span.Slice(iStart, ByteWidth).ToArray();
-                Span<byte> jBytes = ValueBuffer.Span.Slice(jStart, ByteWidth);
-
-                for (int m = 0; m < ByteWidth; m++)
-                {
-                    ValueBuffer.Span[iStart + m] = jBytes[m];
-                    ValueBuffer.Span[jStart + m] = iBytes[m];
-                }
-
-                ValidityBuffer.Swap(i, j);
-                return Instance;
-            }
-
-            public TBuilder Set(int index, byte[] value)
-            {
-                return Set(index, value.AsSpan());
-            }
-
-            public TBuilder Set(int index, ReadOnlySpan<byte> value)
-            {
-                int startIndex = index * ByteWidth;
-                for (int i = 0; i < ByteWidth; i++)
-                {
-                    ValueBuffer.Span[startIndex + i] = value[i];
-                }
-
-                ValidityBuffer.Set(index, true);
-                return Instance;
-            }
-
-            public TBuilder SetNull(int index)
-            {
-                int startIndex = index * ByteWidth;
-                for (int i = 0; i < ByteWidth; i++)
-                {
-                    ValueBuffer.Span[startIndex + i] = 0;
-                }
-
-                ValidityBuffer.Set(index, false);
-                return Instance;
-            }
-
-        }
-    }
-}
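A sketch of how BuilderBase was typically specialized. UuidArray is hypothetical, and the FixedSizeBinaryType(byteWidth) constructor is assumed from the library's fixed-size binary type:

    using System;
    using Apache.Arrow;
    using Apache.Arrow.Arrays;
    using Apache.Arrow.Types;

    // Hypothetical: a 16-byte fixed-width array of GUIDs.
    public class UuidArray : FixedSizeBinaryArray
    {
        public UuidArray(ArrayData data) : base(data) { }

        public class Builder : BuilderBase<UuidArray, Builder>
        {
            // Assumption: FixedSizeBinaryType takes the byte width.
            public Builder() : base(new FixedSizeBinaryType(16), 16) { }

            protected override UuidArray Build(ArrayData data) => new UuidArray(data);

            // Guid.ToByteArray() always yields exactly 16 bytes.
            public Builder Append(Guid value) => Append(value.ToByteArray());
        }
    }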
diff --git a/csharp/src/Apache.Arrow/Arrays/FloatArray.cs b/csharp/src/Apache.Arrow/Arrays/FloatArray.cs
deleted file mode 100644
index 8feca32..0000000
--- a/csharp/src/Apache.Arrow/Arrays/FloatArray.cs
+++ /dev/null
@@ -1,45 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class FloatArray : PrimitiveArray<float>
-    {
-        public class Builder : PrimitiveArrayBuilder<float, FloatArray, Builder>
-        {
-            protected override FloatArray Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new FloatArray(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public FloatArray(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(FloatType.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public FloatArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Float);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/Int16Array.cs b/csharp/src/Apache.Arrow/Arrays/Int16Array.cs
deleted file mode 100644
index 0401865..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Int16Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class Int16Array : PrimitiveArray<short>
-    {
-        public class Builder : PrimitiveArrayBuilder<short, Int16Array, Builder>
-        {
-            protected override Int16Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer, 
-                int length, int nullCount, int offset) =>
-                new Int16Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public Int16Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(Int16Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public Int16Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Int16);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/Int32Array.cs b/csharp/src/Apache.Arrow/Arrays/Int32Array.cs
deleted file mode 100644
index ef356c7..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Int32Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class Int32Array : PrimitiveArray<int>
-    {
-        public class Builder : PrimitiveArrayBuilder<int, Int32Array, Builder>
-        {
-            protected override Int32Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new Int32Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public Int32Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(Int32Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public Int32Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Int32);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/Int64Array.cs b/csharp/src/Apache.Arrow/Arrays/Int64Array.cs
deleted file mode 100644
index fe8fbc6..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Int64Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class Int64Array : PrimitiveArray<long>
-    {
-        public class Builder : PrimitiveArrayBuilder<long, Int64Array, Builder>
-        {
-            protected override Int64Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new Int64Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public Int64Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(Int64Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public Int64Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Int64);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/Int8Array.cs b/csharp/src/Apache.Arrow/Arrays/Int8Array.cs
deleted file mode 100644
index 58d543a..0000000
--- a/csharp/src/Apache.Arrow/Arrays/Int8Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class Int8Array : PrimitiveArray<sbyte>
-    {
-        public class Builder : PrimitiveArrayBuilder<sbyte, Int8Array, Builder>
-        {
-            protected override Int8Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new Int8Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public Int8Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(Int8Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public Int8Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Int8);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/ListArray.cs b/csharp/src/Apache.Arrow/Arrays/ListArray.cs
deleted file mode 100644
index 7842e99..0000000
--- a/csharp/src/Apache.Arrow/Arrays/ListArray.cs
+++ /dev/null
@@ -1,200 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using Apache.Arrow.Memory;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class ListArray : Array
-    {
-        public class Builder : IArrowArrayBuilder<ListArray, Builder>
-        {
-            public IArrowArrayBuilder<IArrowArray, IArrowArrayBuilder<IArrowArray>> ValueBuilder { get; }
-
-            public int Length => ValueOffsetsBufferBuilder.Length;
-
-            private ArrowBuffer.Builder<int> ValueOffsetsBufferBuilder { get; }
-
-            private ArrowBuffer.BitmapBuilder ValidityBufferBuilder { get; }
-
-            public int NullCount { get; protected set; }
-
-            private IArrowType DataType { get; }
-
-            public Builder(IArrowType valueDataType) : this(new ListType(valueDataType))
-            {
-            }
-
-            public Builder(Field valueField) : this(new ListType(valueField))
-            {
-            }
-
-            internal Builder(ListType dataType)
-            {
-                ValueBuilder = ArrowArrayBuilderFactory.Build(dataType.ValueDataType);
-                ValueOffsetsBufferBuilder = new ArrowBuffer.Builder<int>();
-                ValidityBufferBuilder = new ArrowBuffer.BitmapBuilder();
-                DataType = dataType;
-            }
-
-            /// <summary>
-            /// Start a new variable-length list slot.
-            /// </summary>
-            /// <remarks>
-            /// This method should be called before appending the slot's elements to the value builder.
-            /// </remarks>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder Append()
-            {
-                ValueOffsetsBufferBuilder.Append(ValueBuilder.Length);
-                ValidityBufferBuilder.Append(true);
-
-                return this;
-            }
-
-            public Builder AppendNull()
-            {
-                ValueOffsetsBufferBuilder.Append(ValueBuilder.Length);
-                ValidityBufferBuilder.Append(false);
-                NullCount++;
-
-                return this;
-            }
-
-            public ListArray Build(MemoryAllocator allocator = default)
-            {
-                Append();
-
-                ArrowBuffer validityBuffer = NullCount > 0
-                                        ? ValidityBufferBuilder.Build(allocator)
-                                        : ArrowBuffer.Empty;
-
-                return new ListArray(DataType, Length - 1,
-                    ValueOffsetsBufferBuilder.Build(allocator), ValueBuilder.Build(allocator),
-                    validityBuffer, NullCount, 0);
-            }
-
-            public Builder Reserve(int capacity)
-            {
-                ValueOffsetsBufferBuilder.Reserve(capacity + 1);
-                ValidityBufferBuilder.Reserve(capacity + 1);
-                return this;
-            }
-
-            public Builder Resize(int length)
-            {
-                ValueOffsetsBufferBuilder.Resize(length + 1);
-                ValidityBufferBuilder.Resize(length + 1);
-                return this;
-            }
-
-            public Builder Clear()
-            {
-                ValueOffsetsBufferBuilder.Clear();
-                ValueBuilder.Clear();
-                ValidityBufferBuilder.Clear();
-                return this;
-            }
-
-        }
-
-        public IArrowArray Values { get; }
-
-        public ArrowBuffer ValueOffsetsBuffer => Data.Buffers[1];
-
-        public ReadOnlySpan<int> ValueOffsets => ValueOffsetsBuffer.Span.CastTo<int>().Slice(Offset, Length + 1);
-
-        public ListArray(IArrowType dataType, int length,
-            ArrowBuffer valueOffsetsBuffer, IArrowArray values,
-            ArrowBuffer nullBitmapBuffer, int nullCount = 0, int offset = 0)
-            : this(new ArrayData(dataType, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueOffsetsBuffer }, new[] { values.Data }),
-                values)
-        {
-        }
-
-        public ListArray(ArrayData data)
-            : this(data, ArrowArrayFactory.BuildArray(data.Children[0]))
-        {
-        }
-
-        private ListArray(ArrayData data, IArrowArray values) : base(data)
-        {
-            data.EnsureBufferCount(2);
-            data.EnsureDataType(ArrowTypeId.List);
-            Values = values;
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-
-        [Obsolete("This method has been deprecated. Please use ValueOffsets[index] instead.")]
-        public int GetValueOffset(int index)
-        {
-            if (index < 0 || index > Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-            return ValueOffsets[index];
-        }
-
-        public int GetValueLength(int index)
-        {
-            if (index < 0 || index >= Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-
-            if (IsNull(index))
-            {
-                return 0;
-            }
-
-            ReadOnlySpan<int> offsets = ValueOffsets;
-            return offsets[index + 1] - offsets[index];
-        }
-
-        public IArrowArray GetSlicedValues(int index)
-        {
-            if (index < 0 || index >= Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-
-            if (IsNull(index))
-            {
-                return null;
-            }
-
-            if (!(Values is Array array))
-            {
-                return default;
-            }
-
-            return array.Slice(ValueOffsets[index], GetValueLength(index));
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                Values?.Dispose();
-            }
-            base.Dispose(disposing);
-        }
-    }
-}
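Following the builder contract documented above (call Append() to open a slot, then feed the value builder), a minimal sketch that produces the list<int32> array [[1, 2], null, [3]]:

    using Apache.Arrow;
    using Apache.Arrow.Types;

    var builder = new ListArray.Builder(Int32Type.Default);
    var values = (Int32Array.Builder)builder.ValueBuilder;

    builder.Append();            // open slot 0
    values.Append(1).Append(2);
    builder.AppendNull();        // slot 1 is null
    builder.Append();            // open slot 2
    values.Append(3);

    ListArray list = builder.Build();
    // list.GetValueLength(0) == 2; list.IsNull(1); list.GetSlicedValues(2) yields [3]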
diff --git a/csharp/src/Apache.Arrow/Arrays/PrimitiveArray.cs b/csharp/src/Apache.Arrow/Arrays/PrimitiveArray.cs
deleted file mode 100644
index 7365a77..0000000
--- a/csharp/src/Apache.Arrow/Arrays/PrimitiveArray.cs
+++ /dev/null
@@ -1,70 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Runtime.CompilerServices;
-
-namespace Apache.Arrow
-{
-    public abstract class PrimitiveArray<T> : Array
-        where T : struct
-    {
-        protected PrimitiveArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureBufferCount(2);
-        }
-
-        public ArrowBuffer ValueBuffer => Data.Buffers[1];
-
-        public ReadOnlySpan<T> Values => ValueBuffer.Span.CastTo<T>().Slice(Offset, Length);
-
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        public T? GetValue(int index)
-        {
-            if (index < 0 || index >= Length)
-            {
-                throw new ArgumentOutOfRangeException(nameof(index));
-            }
-            return IsValid(index) ? Values[index] : (T?)null;
-        }
-
-        public IList<T?> ToList(bool includeNulls = false)
-        {
-            ReadOnlySpan<T> span = Values;
-            var list = new List<T?>(span.Length);
-
-            for (int i = 0; i < span.Length; i++)
-            {
-                T? value = GetValue(i);
-
-                if (value.HasValue)
-                {
-                    list.Add(value.Value);
-                }
-                else
-                {
-                    if (includeNulls)
-                    {
-                        list.Add(null);
-                    }
-                }
-            }
-
-            return list;
-        }
-    }
-}
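A short sketch of the null-handling difference between GetValue and the two ToList modes:

    using System.Collections.Generic;
    using Apache.Arrow;

    Int32Array array = new Int32Array.Builder().Append(1).AppendNull().Append(3).Build();

    int? second = array.GetValue(1);         // null
    IList<int?> dense = array.ToList();      // [1, 3] -- nulls skipped by default
    IList<int?> sparse = array.ToList(true); // [1, null, 3] -- includeNulls: true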
diff --git a/csharp/src/Apache.Arrow/Arrays/PrimitiveArrayBuilder.cs b/csharp/src/Apache.Arrow/Arrays/PrimitiveArrayBuilder.cs
deleted file mode 100644
index 326f045..0000000
--- a/csharp/src/Apache.Arrow/Arrays/PrimitiveArrayBuilder.cs
+++ /dev/null
@@ -1,201 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Memory;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Apache.Arrow
-{
-    public abstract class PrimitiveArrayBuilder<TFrom, TTo, TArray, TBuilder> : IArrowArrayBuilder<TArray, TBuilder>
-        where TTo : struct
-        where TArray : IArrowArray
-        where TBuilder : class, IArrowArrayBuilder<TArray>
-    {
-        protected TBuilder Instance => this as TBuilder;
-        protected IArrowArrayBuilder<TTo, TArray, IArrowArrayBuilder<TArray>> ArrayBuilder { get; }
-
-        public int Length => ArrayBuilder.Length;
-
-        internal PrimitiveArrayBuilder(IArrowArrayBuilder<TTo, TArray, IArrowArrayBuilder<TArray>> builder)
-        {
-            ArrayBuilder = builder ?? throw new ArgumentNullException(nameof(builder));
-        }
-
-        public TArray Build(MemoryAllocator allocator = default) => ArrayBuilder.Build(allocator);
-
-        public TBuilder Append(TFrom value)
-        {
-            ArrayBuilder.Append(ConvertTo(value));
-            return Instance;
-        }
-
-        public TBuilder Append(ReadOnlySpan<TFrom> span)
-        {
-            ArrayBuilder.Reserve(span.Length);
-            foreach (TFrom value in span)
-            {
-                Append(value);
-            }
-            return Instance;
-        }
-
-        public TBuilder AppendRange(IEnumerable<TFrom> values)
-        {
-            ArrayBuilder.AppendRange(values.Select(ConvertTo));
-            return Instance;
-        }
-
-        public TBuilder AppendNull()
-        {
-            ArrayBuilder.AppendNull();
-            return Instance;
-        }
-
-        public TBuilder Reserve(int capacity)
-        {
-            ArrayBuilder.Reserve(capacity);
-            return Instance;
-        }
-
-        public TBuilder Resize(int length)
-        {
-            ArrayBuilder.Resize(length);
-            return Instance;
-        }
-
-        public TBuilder Swap(int i, int j)
-        {
-            ArrayBuilder.Swap(i, j);
-            return Instance;
-        }
-
-        public TBuilder Set(int index, TFrom value)
-        {
-            ArrayBuilder.Set(index, ConvertTo(value));
-            return Instance;
-        }
-
-        public TBuilder Clear()
-        {
-            ArrayBuilder.Clear();
-            return Instance;
-        }
-
-        protected abstract TTo ConvertTo(TFrom value);
-    }
-
-    public abstract class PrimitiveArrayBuilder<T, TArray, TBuilder> : IArrowArrayBuilder<T, TArray, TBuilder>
-        where T : struct
-        where TArray : IArrowArray
-        where TBuilder : class, IArrowArrayBuilder<TArray>
-    {
-        protected TBuilder Instance => this as TBuilder;
-        protected ArrowBuffer.Builder<T> ValueBuffer { get; }
-        protected ArrowBuffer.BitmapBuilder ValidityBuffer { get; }
-
-        public int Length => ValueBuffer.Length;
-        protected int NullCount => ValidityBuffer.UnsetBitCount;
-
-        internal PrimitiveArrayBuilder()
-        {
-            ValueBuffer = new ArrowBuffer.Builder<T>();
-            ValidityBuffer = new ArrowBuffer.BitmapBuilder();
-        }
-
-        public TBuilder Resize(int length)
-        {
-            ValueBuffer.Resize(length);
-            ValidityBuffer.Resize(length);
-            return Instance;
-        }
-
-        public TBuilder Reserve(int capacity)
-        {
-            ValueBuffer.Reserve(capacity);
-            ValidityBuffer.Reserve(capacity);
-            return Instance;
-        }
-
-        public TBuilder Append(T value)
-        {
-            ValueBuffer.Append(value);
-            ValidityBuffer.Append(true);
-            return Instance;
-        }
-
-        public TBuilder Append(ReadOnlySpan<T> span)
-        {
-            int len = ValueBuffer.Length;
-            ValueBuffer.Append(span);
-            ValidityBuffer.AppendRange(Enumerable.Repeat(true, ValueBuffer.Length - len));
-            return Instance;
-        }
-
-        public TBuilder AppendRange(IEnumerable<T> values)
-        {
-            int len = ValueBuffer.Length;
-            ValueBuffer.AppendRange(values);
-            ValidityBuffer.AppendRange(Enumerable.Repeat(true, ValueBuffer.Length - len));
-            return Instance;
-        }
-
-        public TBuilder AppendNull()
-        {
-            ValidityBuffer.Append(false);
-            ValueBuffer.Append(default(T));
-            return Instance;
-        }
-
-        public TBuilder Clear()
-        {
-            ValueBuffer.Clear();
-            ValidityBuffer.Clear();
-            return Instance;
-        }
-
-        public TBuilder Set(int index, T value)
-        {
-            ValueBuffer.Span[index] = value;
-            ValidityBuffer.Set(index, true);
-            return Instance;
-        }
-
-        public TBuilder Swap(int i, int j)
-        {
-            T x = ValueBuffer.Span[i];
-            ValueBuffer.Span[i] = ValueBuffer.Span[j];
-            ValueBuffer.Span[j] = x;
-            ValidityBuffer.Swap(i, j);
-            return Instance;
-        }
-
-        public TArray Build(MemoryAllocator allocator = default)
-        {
-            ArrowBuffer validityBuffer = NullCount > 0
-                                    ? ValidityBuffer.Build(allocator)
-                                    : ArrowBuffer.Empty;
-
-            return Build(
-                ValueBuffer.Build(allocator), validityBuffer,
-                ValueBuffer.Length, NullCount, 0);
-        }
-
-        protected abstract TArray Build(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset);
-    }
-}
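
Every mutator on the builders deleted above returns Instance (this cast to TBuilder),
which is what makes fluent chaining work across the derived builder types. A rough
usage sketch against the value-buffer-backed variant, using UInt16Array.Builder as it
appears later in this diff:

    UInt16Array array = new UInt16Array.Builder()
        .Reserve(4)                            // pre-size value and validity buffers
        .Append(10)
        .AppendNull()                          // default(ushort) plus an unset validity bit
        .AppendRange(new ushort[] { 20, 30 })
        .Build();                              // validity buffer built only when NullCount > 0
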
diff --git a/csharp/src/Apache.Arrow/Arrays/StringArray.cs b/csharp/src/Apache.Arrow/Arrays/StringArray.cs
deleted file mode 100644
index f008f56..0000000
--- a/csharp/src/Apache.Arrow/Arrays/StringArray.cs
+++ /dev/null
@@ -1,95 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-using System.Collections.Generic;
-using System.Runtime.InteropServices;
-using System.Text;
-
-namespace Apache.Arrow
-{
-    public class StringArray: BinaryArray
-    {
-        public static readonly Encoding DefaultEncoding = Encoding.UTF8;
-
-        public new class Builder : BuilderBase<StringArray, Builder>
-        {
-            public Builder() : base(StringType.Default) { }
-
-            protected override StringArray Build(ArrayData data)
-            {
-                return new StringArray(data);
-            }
-
-            public Builder Append(string value, Encoding encoding = null)
-            {
-                if (value == null)
-                {
-                    return AppendNull();
-                }
-                encoding = encoding ?? DefaultEncoding;
-                byte[] bytes = encoding.GetBytes(value);
-                return Append(bytes.AsSpan());
-            }
-
-            public Builder AppendRange(IEnumerable<string> values, Encoding encoding = null)
-            {
-                foreach (string value in values)
-                {
-                    Append(value, encoding);
-                }
-
-                return this;
-            }
-        }
-
-        public StringArray(ArrayData data)
-            : base(ArrowTypeId.String, data) { }
-
-        public StringArray(int length,
-            ArrowBuffer valueOffsetsBuffer,
-            ArrowBuffer dataBuffer,
-            ArrowBuffer nullBitmapBuffer,
-            int nullCount = 0, int offset = 0)
-            : this(new ArrayData(StringType.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueOffsetsBuffer, dataBuffer }))
-        { }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        public string GetString(int index, Encoding encoding = default)
-        {
-            encoding = encoding ?? DefaultEncoding;
-
-            ReadOnlySpan<byte> bytes = GetBytes(index);
-
-            if (bytes == default)
-            {
-                return null;
-            }
-            if (bytes.Length == 0)
-            {
-                return string.Empty;
-            }
-
-            unsafe
-            {
-                fixed (byte* data = &MemoryMarshal.GetReference(bytes))
-                    return encoding.GetString(data, bytes.Length);
-            }
-        }
-    }
-}
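
GetString above distinguishes a null slot (GetBytes yields a default span) from an
empty string (a zero-length span), and both Append and GetString fall back to UTF-8.
A short sketch, assuming the BinaryArray builder base removed earlier in this diff:

    string missing = null;
    StringArray strings = new StringArray.Builder()
        .Append("hello")
        .Append(missing)                // null is routed to AppendNull()
        .Append("")
        .Build();

    string a = strings.GetString(0);    // "hello"
    string b = strings.GetString(1);    // null   (default span from GetBytes)
    string c = strings.GetString(2);    // ""     (zero-length, non-null slot)
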
diff --git a/csharp/src/Apache.Arrow/Arrays/StructArray.cs b/csharp/src/Apache.Arrow/Arrays/StructArray.cs
deleted file mode 100644
index 31aea9b..0000000
--- a/csharp/src/Apache.Arrow/Arrays/StructArray.cs
+++ /dev/null
@@ -1,59 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading;
-
-namespace Apache.Arrow
-{
-    public class StructArray : Array
-    {
-        private IReadOnlyList<IArrowArray> _fields;
-
-        public IReadOnlyList<IArrowArray> Fields =>
-            LazyInitializer.EnsureInitialized(ref _fields, () => InitializeFields());
-
-        public StructArray(
-            IArrowType dataType, int length,
-            IEnumerable<IArrowArray> children,
-            ArrowBuffer nullBitmapBuffer, int nullCount = 0, int offset = 0)
-            : this(new ArrayData(
-                dataType, length, nullCount, offset, new[] { nullBitmapBuffer },
-                children.Select(child => child.Data)))
-        {
-            _fields = children.ToArray();
-        }
-
-        public StructArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Struct);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        private IReadOnlyList<IArrowArray> InitializeFields()
-        {
-            IArrowArray[] result = new IArrowArray[Data.Children.Length];
-            for (int i = 0; i < Data.Children.Length; i++)
-            {
-                result[i] = ArrowArrayFactory.BuildArray(Data.Children[i]);
-            }
-            return result;
-        }
-    }
-}
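
The Fields property above is lazy: when a StructArray is constructed from raw
ArrayData, the child IArrowArray wrappers are only built (via ArrowArrayFactory) on
first access, while the child-array constructor captures them eagerly. Constructing
one from children, in sketch form (the StructType, Field, and Int32Array signatures
here are illustrative, not verbatim from this tree):

    var type = new StructType(new[] { new Field("x", Int32Type.Default, nullable: true) });
    IArrowArray child = new Int32Array.Builder().Append(1).Append(2).Build();

    var structs = new StructArray(
        type, length: 2,
        children: new[] { child },
        nullBitmapBuffer: ArrowBuffer.Empty);
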
diff --git a/csharp/src/Apache.Arrow/Arrays/TimestampArray.cs b/csharp/src/Apache.Arrow/Arrays/TimestampArray.cs
deleted file mode 100644
index 8b5279a..0000000
--- a/csharp/src/Apache.Arrow/Arrays/TimestampArray.cs
+++ /dev/null
@@ -1,149 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-using System.Diagnostics;
-using System.IO;
-
-namespace Apache.Arrow
-{
-    public class TimestampArray: PrimitiveArray<long>
-    {
-        private static readonly DateTimeOffset s_epoch = new DateTimeOffset(1970, 1, 1, 0, 0, 0, 0, TimeSpan.Zero);
-
-        public class Builder: PrimitiveArrayBuilder<DateTimeOffset, long, TimestampArray, Builder>
-        {
-            internal class TimestampBuilder : PrimitiveArrayBuilder<long, TimestampArray, TimestampBuilder>
-            {
-                internal TimestampBuilder(TimestampType type)
-                {
-                    DataType = type ?? throw new ArgumentNullException(nameof(type));
-                }
-
-                protected TimestampType DataType { get; }
-
-                protected override TimestampArray Build(
-                    ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                    int length, int nullCount, int offset) =>
-                    new TimestampArray(DataType, valueBuffer, nullBitmapBuffer,
-                        length, nullCount, offset);
-            }
-
-            protected TimestampType DataType { get; }
-
-            public Builder()
-                : this(TimestampType.Default) { }
-
-            public Builder(TimeUnit unit, TimeZoneInfo timezone)
-                : this(new TimestampType(unit, timezone)) { }
-
-            public Builder(TimeUnit unit = TimeUnit.Millisecond, string timezone = "+00:00")
-                : this(new TimestampType(unit, timezone)) { }
-
-            public Builder(TimeUnit unit)
-                : this(new TimestampType(unit, (string) null)) { }
-
-            public Builder(TimestampType type)
-                : base(new TimestampBuilder(type))
-            {
-                DataType = type;
-            }
-
-            protected override long ConvertTo(DateTimeOffset value)
-            {
-                // We must return the absolute time since the UNIX epoch while
-                // respecting the timezone offset; the calculation is as follows:
-                //
-                // - Compute time span between epoch and specified time
-                // - Compute time divisions per tick
-
-                TimeSpan timeSpan = value - s_epoch;
-                long ticks = timeSpan.Ticks;
-
-                switch (DataType.Unit)
-                {
-                    case TimeUnit.Nanosecond:
-                        return ticks * 100;     // one tick is 100 nanoseconds
-                    case TimeUnit.Microsecond:
-                        return ticks / 10;      // ten ticks per microsecond
-                    case TimeUnit.Millisecond:
-                        return ticks / TimeSpan.TicksPerMillisecond;
-                    case TimeUnit.Second:
-                        return ticks / TimeSpan.TicksPerSecond;
-                    default:
-                        throw new InvalidOperationException($"unsupported time unit <{DataType.Unit}>");
-                }
-            }
-        }
-
-        public TimestampArray(
-            TimestampType type,
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(type, length, nullCount, offset,
-                new[] {nullBitmapBuffer, valueBuffer})) { }
-
-        public TimestampArray(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Timestamp);
-
-            Debug.Assert(Data.DataType is TimestampType);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-        public DateTimeOffset GetTimestampUnchecked(int index)
-        {
-            var type = (TimestampType) Data.DataType;
-            long value = Values[index];
-
-            long ticks;
-
-            switch (type.Unit)
-            {
-                case TimeUnit.Nanosecond:
-                    ticks = value / 100;        // 100 nanoseconds per tick
-                    break;
-                case TimeUnit.Microsecond:
-                    ticks = value * 10;         // ten ticks per microsecond
-                    break;
-                case TimeUnit.Millisecond:
-                    ticks = value * TimeSpan.TicksPerMillisecond;
-                    break;
-                case TimeUnit.Second:
-                    ticks = value * TimeSpan.TicksPerSecond;
-                    break;
-                default:
-                    throw new InvalidDataException(
-                        $"Unsupported timestamp unit <{type.Unit}>");
-            }
-
-            return new DateTimeOffset(s_epoch.Ticks + ticks, TimeSpan.Zero);
-        }
-
-        public DateTimeOffset? GetTimestamp(int index)
-        {
-            if (IsNull(index))
-            {
-                return null;
-            }
-
-            return GetTimestampUnchecked(index);
-        }
-
-    }
-}
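
The unit conversions above hinge on a .NET tick being 100 nanoseconds
(TimeSpan.TicksPerMillisecond == 10_000, TicksPerSecond == 10_000_000). A worked
round trip for a microsecond-resolution column, as a sketch:

    var builder = new TimestampArray.Builder(TimeUnit.Microsecond, "+00:00");
    var t = new DateTimeOffset(1970, 1, 1, 0, 0, 1, TimeSpan.Zero);   // epoch + 1s

    // ConvertTo: (t - epoch).Ticks == 10_000_000, so 10_000_000 / 10 == 1_000_000 us
    TimestampArray array = builder.Append(t).Build();

    // GetTimestamp: 1_000_000 * 10 ticks == 10_000_000 -> epoch + 1s again
    DateTimeOffset? roundTripped = array.GetTimestamp(0);
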
diff --git a/csharp/src/Apache.Arrow/Arrays/UInt16Array.cs b/csharp/src/Apache.Arrow/Arrays/UInt16Array.cs
deleted file mode 100644
index bba244f..0000000
--- a/csharp/src/Apache.Arrow/Arrays/UInt16Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class UInt16Array : PrimitiveArray<ushort>
-    {
-        public class Builder : PrimitiveArrayBuilder<ushort, UInt16Array, Builder>
-        {
-            protected override UInt16Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new UInt16Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public UInt16Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(UInt16Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public UInt16Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.UInt16);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-    }
-
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/UInt32Array.cs b/csharp/src/Apache.Arrow/Arrays/UInt32Array.cs
deleted file mode 100644
index 65320be..0000000
--- a/csharp/src/Apache.Arrow/Arrays/UInt32Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class UInt32Array : PrimitiveArray<uint>
-    {
-        public class Builder : PrimitiveArrayBuilder<uint, UInt32Array, Builder>
-        {
-            protected override UInt32Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new UInt32Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public UInt32Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(UInt32Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public UInt32Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.UInt32);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/UInt64Array.cs b/csharp/src/Apache.Arrow/Arrays/UInt64Array.cs
deleted file mode 100644
index 617949f..0000000
--- a/csharp/src/Apache.Arrow/Arrays/UInt64Array.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class UInt64Array : PrimitiveArray<ulong>
-    {
-        public class Builder : PrimitiveArrayBuilder<ulong, UInt64Array, Builder>
-        {
-            protected override UInt64Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new UInt64Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public UInt64Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(UInt64Type.Default, length, nullCount, offset,
-                new[] { nullBitmapBuffer, valueBuffer }))
-        { }
-
-        public UInt64Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.UInt64);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/UInt8Array.cs b/csharp/src/Apache.Arrow/Arrays/UInt8Array.cs
deleted file mode 100644
index 5cde791..0000000
--- a/csharp/src/Apache.Arrow/Arrays/UInt8Array.cs
+++ /dev/null
@@ -1,45 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public class UInt8Array : PrimitiveArray<byte>
-    {
-        public class Builder : PrimitiveArrayBuilder<byte, UInt8Array, Builder>
-        {
-            protected override UInt8Array Build(
-                ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-                int length, int nullCount, int offset) =>
-                new UInt8Array(valueBuffer, nullBitmapBuffer, length, nullCount, offset);
-        }
-
-        public UInt8Array(
-            ArrowBuffer valueBuffer, ArrowBuffer nullBitmapBuffer,
-            int length, int nullCount, int offset)
-            : this(new ArrayData(UInt8Type.Default, length, nullCount, offset, 
-                new[] { nullBitmapBuffer, valueBuffer })) { }
-
-        public UInt8Array(ArrayData data)
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.UInt8);
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Arrays/UnionArray.cs b/csharp/src/Apache.Arrow/Arrays/UnionArray.cs
deleted file mode 100644
index 8bccea2..0000000
--- a/csharp/src/Apache.Arrow/Arrays/UnionArray.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-
-namespace Apache.Arrow
-{
-    public class UnionArray: Array
-    {
-        public UnionType Type => Data.DataType as UnionType;
-
-        public UnionMode Mode => Type.Mode;
-
-        public ArrowBuffer TypeBuffer => Data.Buffers[1];
-
-        public ArrowBuffer ValueOffsetBuffer => Data.Buffers[2];
-
-        public ReadOnlySpan<byte> TypeIds => TypeBuffer.Span;
-
-        public ReadOnlySpan<int> ValueOffsets => ValueOffsetBuffer.Span.CastTo<int>().Slice(0, Length + 1);
-
-        public UnionArray(ArrayData data) 
-            : base(data)
-        {
-            data.EnsureDataType(ArrowTypeId.Union);
-            data.EnsureBufferCount(3);
-        }
-
-        public IArrowArray GetChild(int index)
-        {
-            // TODO: Implement
-            throw new NotImplementedException();
-        }
-
-        public override void Accept(IArrowArrayVisitor visitor) => Accept(this, visitor);
-
-    }
-}
diff --git a/csharp/src/Apache.Arrow/ArrowBuffer.BitmapBuilder.cs b/csharp/src/Apache.Arrow/ArrowBuffer.BitmapBuilder.cs
deleted file mode 100644
index c27ef35..0000000
--- a/csharp/src/Apache.Arrow/ArrowBuffer.BitmapBuilder.cs
+++ /dev/null
@@ -1,280 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using Apache.Arrow.Memory;
-
-namespace Apache.Arrow
-{
-    public partial struct ArrowBuffer
-    {
-        /// <summary>
-        /// The <see cref="BitmapBuilder"/> class is a complement to <see cref="ArrowBuffer.Builder{T}"/>
-        /// and is designed for boolean fields, which are efficiently bit-packed into byte-aligned memory.
-        /// </summary>
-        public class BitmapBuilder
-        {
-            private const int DefaultBitCapacity = 64;
-
-            /// <summary>
-            /// Gets the number of bits that can be contained in the memory allocated by the current instance.
-            /// </summary>
-            public int Capacity { get; private set; }
-
-            /// <summary>
-            /// Gets the number of bits currently appended.
-            /// </summary>
-            public int Length { get; private set; }
-
-            /// <summary>
-            /// Gets the raw byte memory underpinning the builder.
-            /// </summary>
-            public Memory<byte> Memory { get; private set; }
-
-            /// <summary>
-            /// Gets the span of (bit-packed byte) memory underpinning the builder.
-            /// </summary>
-            public Span<byte> Span => Memory.Span;
-
-            /// <summary>
-            /// Gets the number of set bits (i.e. set to 1).
-            /// </summary>
-            public int SetBitCount { get; private set; }
-
-            /// <summary>
-            /// Gets the number of unset bits (i.e. set to 0).
-            /// </summary>
-            public int UnsetBitCount => Length - SetBitCount;
-
-            /// <summary>
-            /// Creates an instance of the <see cref="BitmapBuilder"/> class.
-            /// </summary>
-            /// <param name="capacity">Number of bits of initial capacity to reserve.</param>
-            public BitmapBuilder(int capacity = DefaultBitCapacity)
-            {
-                Memory = new byte[BitUtility.ByteCount(capacity)];
-                Capacity = capacity;
-            }
-
-            /// <summary>
-            /// Append a single bit.
-            /// </summary>
-            /// <param name="value">Bit to append.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Append(bool value)
-            {
-                if (Length % 8 == 0)
-                {
-                    // Append a new byte to the buffer when needed.
-                    EnsureAdditionalCapacity(1);
-                }
-
-                BitUtility.SetBit(Span, Length, value);
-                Length++;
-                SetBitCount += value ? 1 : 0;
-                return this;
-            }
-
-            /// <summary>
-            /// Append multiple bits.
-            /// </summary>
-            /// <param name="values">Bits to append.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder AppendRange(IEnumerable<bool> values)
-            {
-                if (values != null)
-                {
-                    foreach (var v in values)
-                    {
-                        Append(v);
-                    }
-                }
-
-                return this;
-            }
-
-            /// <summary>
-            /// Toggle the bit at a particular index.
-            /// </summary>
-            /// <param name="index">Index of bit to toggle.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Toggle(int index)
-            {
-                CheckIndex(index);
-                bool priorValue = BitUtility.GetBit(Span, index);
-                SetBitCount += priorValue ? -1 : 1;
-                BitUtility.ToggleBit(Span, index);
-                return this;
-            }
-
-            /// <summary>
-            /// Set the bit at a particular index to 1.
-            /// </summary>
-            /// <param name="index">Index of bit to set.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Set(int index)
-            {
-                CheckIndex(index);
-                bool priorValue = BitUtility.GetBit(Span, index);
-                SetBitCount += priorValue ? 0 : 1;
-                BitUtility.SetBit(Span, index);
-                return this;
-            }
-
-            /// <summary>
-            /// Set the bit at a particular index to a given value.
-            /// </summary>
-            /// <param name="index">Index of bit to set/unset.</param>
-            /// <param name="value">Value of bit.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Set(int index, bool value)
-            {
-                CheckIndex(index);
-                bool priorValue = BitUtility.GetBit(Span, index);
-                SetBitCount -= priorValue ? 1 : 0;
-                SetBitCount += value ? 1 : 0;
-                BitUtility.SetBit(Span, index, value);
-                return this;
-            }
-
-            /// <summary>
-            /// Swap the bits at two given indices.
-            /// </summary>
-            /// <param name="i">First index.</param>
-            /// <param name="j">Second index.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Swap(int i, int j)
-            {
-                CheckIndex(i);
-                CheckIndex(j);
-                bool bi = BitUtility.GetBit(Span, i);
-                bool bj = BitUtility.GetBit(Span, j);
-                BitUtility.SetBit(Span, i, bj);
-                BitUtility.SetBit(Span, j, bi);
-                return this;
-            }
-
-            /// <summary>
-            /// Reserve a given number of bits' additional capacity.
-            /// </summary>
-            /// <param name="additionalCapacity">Number of bits of required additional capacity.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Reserve(int additionalCapacity)
-            {
-                if (additionalCapacity < 0)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(additionalCapacity));
-                }
-
-                EnsureAdditionalCapacity(additionalCapacity);
-                return this;
-            }
-
-            /// <summary>
-            /// Resize the buffer to a given size.
-            /// </summary>
-            /// <remarks>
-            /// Note that if the required capacity is larger than the current length of the populated buffer so far,
-            /// the buffer's contents in the new, expanded region are undefined.
-            /// </remarks>
-            /// <remarks>
-            /// Note that if the required capacity is smaller than the current length of the populated buffer so far,
-            /// the buffer will be truncated and items at the end of the buffer will be lost.
-            /// </remarks>
-            /// <param name="capacity">Number of bits of required capacity.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Resize(int capacity)
-            {
-                if (capacity < 0)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(capacity), "Capacity must be non-negative");
-                }
-
-                EnsureCapacity(capacity);
-                Length = capacity;
-
-                SetBitCount = BitUtility.CountBits(Span, 0, Length);
-
-                return this;
-            }
-
-            /// <summary>
-            /// Clear all contents appended so far.
-            /// </summary>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public BitmapBuilder Clear()
-            {
-                Span.Fill(default);
-                Length = 0;
-                SetBitCount = 0;
-                return this;
-            }
-
-            /// <summary>
-            /// Build an Arrow buffer from the appended contents so far.
-            /// </summary>
-            /// <param name="allocator">Optional memory allocator.</param>
-            /// <returns>Returns an <see cref="ArrowBuffer"/> object.</returns>
-            public ArrowBuffer Build(MemoryAllocator allocator = default)
-            {
-                int bufferLength = checked((int)BitUtility.RoundUpToMultipleOf64(Memory.Length));
-                var memoryAllocator = allocator ?? MemoryAllocator.Default.Value;
-                var memoryOwner = memoryAllocator.Allocate(bufferLength);
-                Memory.CopyTo(memoryOwner.Memory);
-                return new ArrowBuffer(memoryOwner);
-            }
-
-            private void CheckIndex(int index)
-            {
-                if (index < 0 || index >= Length)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(index));
-                }
-            }
-
-            private void EnsureAdditionalCapacity(int additionalCapacity)
-            {
-                EnsureCapacity(checked(Length + additionalCapacity));
-            }
-
-            private void EnsureCapacity(int requiredCapacity)
-            {
-                if (requiredCapacity > Capacity)
-                {
-                    // TODO: specifiable growth strategy
-                    // Double the length of the in-memory array, or use the byte count of the capacity, whichever is
-                    // greater.
-                    int byteCount = Math.Max(BitUtility.ByteCount(requiredCapacity), Memory.Length * 2);
-                    Reallocate(byteCount);
-                    Capacity = byteCount * 8;
-                }
-            }
-
-            private void Reallocate(int numBytes)
-            {
-                if (numBytes != 0)
-                {
-                    Debug.Assert(numBytes > Memory.Length);
-                    var memory = new Memory<byte>(new byte[numBytes]);
-                    Memory.CopyTo(memory);
-
-                    Memory = memory;
-                }
-            }
-        }
-    }
-}
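
BitmapBuilder above keeps a running SetBitCount so that UnsetBitCount (the null
count when the bitmap backs a validity buffer) never needs a rescan; only Resize
recounts, since truncation or newly exposed bits can change the tally. In sketch
form:

    var bitmap = new ArrowBuffer.BitmapBuilder()
        .Append(true)
        .Append(false)
        .Append(true);

    int set = bitmap.SetBitCount;         // 2
    int unset = bitmap.UnsetBitCount;     // 1

    bitmap.Toggle(1);                     // adjusted incrementally -> SetBitCount == 3
    ArrowBuffer buffer = bitmap.Build();  // padded up to a multiple of 64 bytes
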
diff --git a/csharp/src/Apache.Arrow/ArrowBuffer.Builder.cs b/csharp/src/Apache.Arrow/ArrowBuffer.Builder.cs
deleted file mode 100644
index 7c03027..0000000
--- a/csharp/src/Apache.Arrow/ArrowBuffer.Builder.cs
+++ /dev/null
@@ -1,255 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Memory;
-using System;
-using System.Buffers;
-using System.Collections.Generic;
-using System.Runtime.CompilerServices;
-
-namespace Apache.Arrow
-{
-    public partial struct ArrowBuffer
-    {
-        /// <summary>
-        /// The <see cref="Builder{T}"/> class is able to append value-type items, with fluent-style methods, to build
-        /// up an <see cref="ArrowBuffer"/> of contiguous items.
-        /// </summary>
-        /// <remarks>
-        /// Note that <see cref="bool"/> is not supported as a generic type argument for this class.  Please use
-        /// <see cref="BitmapBuilder"/> instead.
-        /// </remarks>
-        /// <typeparam name="T">Value-type of item to build into a buffer.</typeparam>
-        public class Builder<T>
-            where T : struct
-        {
-            private const int DefaultCapacity = 8;
-
-            private readonly int _size;
-
-            /// <summary>
-            /// Gets the number of items that can be contained in the memory allocated by the current instance.
-            /// </summary>
-            public int Capacity => Memory.Length / _size;
-
-            /// <summary>
-            /// Gets the number of items currently appended.
-            /// </summary>
-            public int Length { get; private set; }
-
-            /// <summary>
-            /// Gets the raw byte memory underpinning the builder.
-            /// </summary>
-            public Memory<byte> Memory { get; private set; }
-
-            /// <summary>
-            /// Gets the span of memory underpinning the builder.
-            /// </summary>
-            public Span<T> Span
-            {
-                [MethodImpl(MethodImplOptions.AggressiveInlining)]
-                get => Memory.Span.CastTo<T>();
-            }
-
-            /// <summary>
-            /// Creates an instance of the <see cref="Builder{T}"/> class.
-            /// </summary>
-            /// <param name="capacity">Number of items of initial capacity to reserve.</param>
-            public Builder(int capacity = DefaultCapacity)
-            {
-                // Arrow expects booleans to be bit-packed, but this builder writes each item
-                // at its full Unsafe.SizeOf<T>() width, so a `bool` would occupy a whole byte.
-                // Because C# generics support neither specialisation nor negated type
-                // constraints, the restriction can only be enforced here, at runtime.
-                if (typeof(T) == typeof(bool))
-                {
-                    throw new NotSupportedException(
-                        $"An instance of {nameof(Builder<T>)} cannot be instantiated, as `bool` is not an " +
-                        $"appropriate generic type to use with this class - please use {nameof(BitmapBuilder)} " +
-                        $"instead");
-                }
-
-                _size = Unsafe.SizeOf<T>();
-
-                Memory = new byte[capacity * _size];
-                Length = 0;
-            }
-
-            /// <summary>
-            /// Append a buffer, assumed to contain items of the same type.
-            /// </summary>
-            /// <param name="buffer">Buffer to append.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> Append(ArrowBuffer buffer)
-            {
-                Append(buffer.Span.CastTo<T>());
-                return this;
-            }
-
-            /// <summary>
-            /// Append a single item.
-            /// </summary>
-            /// <param name="value">Item to append.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> Append(T value)
-            {
-                EnsureAdditionalCapacity(1);
-                Span[Length++] = value;
-                return this;
-            }
-
-            /// <summary>
-            /// Append a span of items.
-            /// </summary>
-            /// <param name="source">Source of item span.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> Append(ReadOnlySpan<T> source)
-            {
-                EnsureAdditionalCapacity(source.Length);
-                source.CopyTo(Span.Slice(Length, source.Length));
-                Length += source.Length;
-                return this;
-            }
-
-            /// <summary>
-            /// Append a number of items.
-            /// </summary>
-            /// <param name="values">Items to append.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> AppendRange(IEnumerable<T> values)
-            {
-                if (values != null)
-                {
-                    foreach (T v in values)
-                    {
-                        Append(v);
-                    }
-                }
-
-                return this;
-            }
-
-            /// <summary>
-            /// Reserve a given number of items' additional capacity.
-            /// </summary>
-            /// <param name="additionalCapacity">Number of items of required additional capacity.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> Reserve(int additionalCapacity)
-            {
-                if (additionalCapacity < 0)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(additionalCapacity));
-                }
-
-                EnsureAdditionalCapacity(additionalCapacity);
-                return this;
-            }
-
-            /// <summary>
-            /// Resize the buffer to a given size.
-            /// </summary>
-            /// <remarks>
-            /// Note that if the required capacity is larger than the current length of the populated buffer so far,
-            /// the buffer's contents in the new, expanded region are undefined.
-            /// </remarks>
-            /// <remarks>
-            /// Note that if the required capacity is smaller than the current length of the populated buffer so far,
-            /// the buffer will be truncated and items at the end of the buffer will be lost.
-            /// </remarks>
-            /// <param name="capacity">Number of items of required capacity.</param>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> Resize(int capacity)
-            {
-                if (capacity < 0)
-                {
-                    throw new ArgumentOutOfRangeException(nameof(capacity), "Capacity must be non-negative");
-                }
-
-                EnsureCapacity(capacity);
-                Length = capacity;
-
-                return this;
-            }
-
-            /// <summary>
-            /// Clear all contents appended so far.
-            /// </summary>
-            /// <returns>Returns the builder (for fluent-style composition).</returns>
-            public Builder<T> Clear()
-            {
-                Span.Fill(default);
-                Length = 0;
-                return this;
-            }
-
-            /// <summary>
-            /// Build an Arrow buffer from the appended contents so far.
-            /// </summary>
-            /// <param name="allocator">Optional memory allocator.</param>
-            /// <returns>Returns an <see cref="ArrowBuffer"/> object.</returns>
-            public ArrowBuffer Build(MemoryAllocator allocator = default)
-            {
-                return Build(64, allocator);
-            }
-
-            /// <summary>
-            /// Build an Arrow buffer from the appended contents so far, padding the buffer
-            /// length up to a multiple of the specified byte size.
-            /// </summary>
-            /// <param name="byteSize">Power-of-two byte multiple to pad the buffer length to.</param>
-            /// <param name="allocator">Optional memory allocator.</param>
-            /// <returns>Returns an <see cref="ArrowBuffer"/> object.</returns>
-            internal ArrowBuffer Build(int byteSize, MemoryAllocator allocator = default)
-            {
-                int currentBytesLength = Length * _size;
-                int bufferLength = checked((int)BitUtility.RoundUpToMultiplePowerOfTwo(currentBytesLength, byteSize));
-
-                MemoryAllocator memoryAllocator = allocator ?? MemoryAllocator.Default.Value;
-                IMemoryOwner<byte> memoryOwner = memoryAllocator.Allocate(bufferLength);
-                Memory.Slice(0, currentBytesLength).CopyTo(memoryOwner.Memory);
-
-                return new ArrowBuffer(memoryOwner);
-            }
-
-            private void EnsureAdditionalCapacity(int additionalCapacity)
-            {
-                EnsureCapacity(checked(Length + additionalCapacity));
-            }
-
-            private void EnsureCapacity(int requiredCapacity)
-            {
-                if (requiredCapacity > Capacity)
-                {
-                    // TODO: specifiable growth strategy
-                    // Double the length of the in-memory array, or use the byte count of the capacity, whichever is
-                    // greater.
-                    int capacity = Math.Max(requiredCapacity * _size, Memory.Length * 2);
-                    Reallocate(capacity);
-                }
-            }
-
-            private void Reallocate(int numBytes)
-            {
-                if (numBytes != 0)
-                {
-                    var memory = new Memory<byte>(new byte[numBytes]);
-                    Memory.CopyTo(memory);
-
-                    Memory = memory;
-                }
-            }
-
-        }
-
-    }
-}
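
Because Builder<T> writes each item at its full width, a bool would take a whole
byte instead of the single bit Arrow's bitmap layout expects, which is why the
constructor rejects it and points callers at BitmapBuilder. Typical use, sketched:

    ArrowBuffer values = new ArrowBuffer.Builder<int>(capacity: 4)
        .Append(1)
        .AppendRange(new[] { 2, 3 })
        .Build();                     // length padded to a multiple of 64 bytes

    // new ArrowBuffer.Builder<bool>() would throw NotSupportedException
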
diff --git a/csharp/src/Apache.Arrow/ArrowBuffer.cs b/csharp/src/Apache.Arrow/ArrowBuffer.cs
deleted file mode 100644
index f8e6759..0000000
--- a/csharp/src/Apache.Arrow/ArrowBuffer.cs
+++ /dev/null
@@ -1,76 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Buffers;
-using System.Runtime.CompilerServices;
-using Apache.Arrow.Memory;
-
-namespace Apache.Arrow
-{
-    public readonly partial struct ArrowBuffer : IEquatable<ArrowBuffer>, IDisposable
-    {
-        private readonly IMemoryOwner<byte> _memoryOwner;
-        private readonly ReadOnlyMemory<byte> _memory;
-
-        public static ArrowBuffer Empty => new ArrowBuffer(Memory<byte>.Empty);
-
-        public ArrowBuffer(ReadOnlyMemory<byte> data)
-        {
-            _memoryOwner = null;
-            _memory = data;
-        }
-
-        internal ArrowBuffer(IMemoryOwner<byte> memoryOwner)
-        {
-            // When wrapping an IMemoryOwner, don't cache the Memory<byte>
-            // since the owner may be disposed, and the cached Memory would
-            // be invalid.
-
-            _memoryOwner = memoryOwner;
-            _memory = Memory<byte>.Empty;
-        }
-
-        public ReadOnlyMemory<byte> Memory =>
-            _memoryOwner != null ? _memoryOwner.Memory : _memory;
-
-        public bool IsEmpty => Memory.IsEmpty;
-
-        public int Length => Memory.Length;
-
-        public ReadOnlySpan<byte> Span
-        {
-            [MethodImpl(MethodImplOptions.AggressiveInlining)]
-            get => Memory.Span;
-        }
-
-        public ArrowBuffer Clone(MemoryAllocator allocator = default)
-        {
-            return new Builder<byte>(Span.Length)
-                .Append(Span)
-                .Build(allocator);
-        }
-
-        public bool Equals(ArrowBuffer other)
-        {
-            return Span.SequenceEqual(other.Span);
-        }
-
-        public void Dispose()
-        {
-            _memoryOwner?.Dispose();
-        }
-    }
-}
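
An ArrowBuffer either wraps caller-provided memory (nothing to dispose) or owns an
IMemoryOwner<byte> obtained from an allocator, which Dispose releases; note that
Equals compares contents, not identity. A sketch:

    byte[] raw = { 1, 2, 3, 4 };
    var wrapped = new ArrowBuffer(raw);    // wraps caller memory; Dispose is a no-op

    ArrowBuffer owned = wrapped.Clone();   // allocator-backed copy that owns its memory
    bool same = wrapped.Equals(owned);     // true: byte-wise comparison
    owned.Dispose();                       // releases the IMemoryOwner
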
diff --git a/csharp/src/Apache.Arrow/BitUtility.cs b/csharp/src/Apache.Arrow/BitUtility.cs
deleted file mode 100644
index 19417bb..0000000
--- a/csharp/src/Apache.Arrow/BitUtility.cs
+++ /dev/null
@@ -1,204 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Diagnostics;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-namespace Apache.Arrow
-{
-    public static class BitUtility
-    {
-        private static ReadOnlySpan<byte> PopcountTable => new byte[] {
-            0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5,
-            1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
-            1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
-            2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
-            1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
-            2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
-            2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
-            3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7, 4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6, 7, 7, 8,
-        };
-
-        private static ReadOnlySpan<byte> BitMask => new byte[] {
-            1, 2, 4, 8, 16, 32, 64, 128
-        };
-
-        public static bool GetBit(byte data, int index) =>
-            ((data >> index) & 1) != 0;
-
-        public static bool GetBit(ReadOnlySpan<byte> data, int index) =>
-            (data[index / 8] & BitMask[index % 8]) != 0;
-
-        public static void ClearBit(Span<byte> data, int index)
-        {
-            data[index / 8] &= (byte) ~BitMask[index % 8];
-        }
-
-        public static void SetBit(Span<byte> data, int index)
-        {
-            data[index / 8] |= BitMask[index % 8];
-        }
-
-        public static void SetBit(Span<byte> data, int index, bool value)
-        {
-            int idx = index / 8;
-            int mod = index % 8;
-            data[idx] = value
-                ? (byte)(data[idx] | BitMask[mod])
-                : (byte)(data[idx] & ~BitMask[mod]);
-        }
-
-        public static void ToggleBit(Span<byte> data, int index)
-        {
-            data[index / 8] ^= BitMask[index % 8];
-        }
-
-        /// <summary>
-        /// Counts the number of set bits in a span of bytes starting
-        /// at a specific bit offset.
-        /// </summary>
-        /// <param name="data">Span to count bits</param>
-        /// <param name="offset">Bit offset to start counting from</param>
-        /// <returns>Count of set (one) bits</returns>
-        public static int CountBits(ReadOnlySpan<byte> data, int offset) =>
-            CountBits(data, offset, data.Length * 8 - offset);
-
-        /// <summary>
-        /// Counts the number of set bits in a span of bytes starting
-        /// at a specific bit offset, and limiting to a certain number of bits
-        /// in the span.
-        /// </summary>
-        /// <param name="data">Span to count bits.</param>
-        /// <param name="offset">Bit offset to start counting from.</param>
-        /// <param name="length">Maximum of bits in the span to consider.</param>
-        /// <returns>Count of set (one) bits</returns>
-        public static int CountBits(ReadOnlySpan<byte> data, int offset, int length)
-        {
-            int startByteIndex = offset / 8;
-            int startBitOffset = offset % 8;
-            int endByteIndex = (offset + length - 1) / 8;
-            int endBitOffset = (offset + length - 1) % 8;
-            if (startBitOffset < 0)
-                return 0; // a negative offset makes the C# remainder negative; nothing to count
-
-            int count = 0;
-            if (startByteIndex == endByteIndex)
-            {
-                // Range starts and ends within the same byte.
-                var slice = data.Slice(startByteIndex, 1);
-                for (int i = startBitOffset; i <= endBitOffset; i++)
-                    count += GetBit(slice, i) ? 1 : 0;
-
-                return count;
-            }
-
-            // If the starting index and ending index are not byte-aligned,
-            // we'll need to count bits the slow way.  If they are
-            // byte-aligned, and for all other bytes in the 'middle', we
-            // can use a faster byte-aligned count.
-            int fullByteStartIndex = startBitOffset == 0 ? startByteIndex : startByteIndex + 1;
-            int fullByteEndIndex = endBitOffset == 7 ? endByteIndex : endByteIndex - 1;
-
-            if (startBitOffset != 0)
-            {
-                var slice = data.Slice(startByteIndex, 1);
-                for (int i = startBitOffset; i <= 7; i++)
-                    count += GetBit(slice, i) ? 1 : 0;
-            }
-
-            if (fullByteEndIndex >= fullByteStartIndex)
-            {
-                var slice = data.Slice(fullByteStartIndex, fullByteEndIndex - fullByteStartIndex + 1);
-                count += CountBits(slice);
-            }
-
-            if (endBitOffset != 7)
-            {
-                var slice = data.Slice(endByteIndex, 1);
-                for (int i = 0; i <= endBitOffset; i++)
-                    count += GetBit(slice, i) ? 1 : 0;
-            }
-
-            return count;
-        }
-
-        /// <summary>
-        /// Counts the number of set bits in a span of bytes.
-        /// </summary>
-        /// <param name="data">Span to count bits</param>
-        /// <returns>Count of set (one) bits.</returns>
-        public static int CountBits(ReadOnlySpan<byte> data)
-        {
-            int count = 0;
-            foreach (byte t in data)
-                count += PopcountTable[t];
-            return count;
-        }
-
-        /// <summary>
-        /// Rounds an integer up to the nearest multiple of 64.
-        /// </summary>
-        /// <param name="n">Integer to round up.</param>
-        /// <returns>Integer rounded up to the nearest multiple of 64.</returns>
-        public static long RoundUpToMultipleOf64(long n) =>
-            RoundUpToMultiplePowerOfTwo(n, 64);
-
-        /// <summary>
-        /// Rounds an integer up to the nearest multiple of 8.
-        /// </summary>
-        /// <param name="n">Integer to round up.</param>
-        /// <returns>Integer rounded up to the nearest multiple of 8.</returns>
-        public static long RoundUpToMultipleOf8(long n) =>
-            RoundUpToMultiplePowerOfTwo(n, 8);
-
-        /// <summary>
-        /// Rounds an integer up to the nearest multiple of factor, where
-        /// factor must be a power of two.
-        ///
-        /// The factor is checked only by a debug assertion; this function does not throw when the factor is not a power of two.
-        /// </summary>
-        /// <param name="n">Integer to round up.</param>
-        /// <param name="factor">Power of two factor to round up to.</param>
-        /// <returns>Integer rounded up to the nearest multiple of the factor.</returns>
-        public static long RoundUpToMultiplePowerOfTwo(long n, int factor)
-        {
-            // Assert that factor is a power of two.
-            Debug.Assert(factor > 0 && (factor & (factor - 1)) == 0);
-            return (n + (factor - 1)) & ~(factor - 1);
-        }
-
-        internal static bool IsMultipleOf8(long n) => n % 8 == 0;
-
-        /// <summary>
-        /// Calculates the number of bytes required to store n bits.
-        /// </summary>
-        /// <param name="n">number of bits</param>
-        /// <returns>number of bytes</returns>
-        public static int ByteCount(int n)
-        {
-            Debug.Assert(n >= 0);
-            return n / 8 + (n % 8 != 0 ? 1 : 0); // ceil(n / 8)
-        }
-
-        internal static int ReadInt32(ReadOnlyMemory<byte> value)
-        {
-            Debug.Assert(value.Length >= sizeof(int));
-
-            return Unsafe.ReadUnaligned<int>(ref MemoryMarshal.GetReference(value.Span));
-        }
-    }
-}
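
For context on the helpers removed above, here is a minimal sketch of how
BitUtility was typically used to maintain an Arrow validity bitmap. The data
and the surrounding method are illustrative, not part of the Arrow API:

    // Inside some method: build a 10-slot validity bitmap.
    Span<byte> validity = stackalloc byte[BitUtility.ByteCount(10)]; // 2 bytes
    for (int i = 0; i < 10; i++)
    {
        // Mark every third slot as null (bit cleared).
        BitUtility.SetBit(validity, i, value: i % 3 != 0);
    }
    int validCount = BitUtility.CountBits(validity, 0, 10); // 6 set bits
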
diff --git a/csharp/src/Apache.Arrow/ChunkedArray.cs b/csharp/src/Apache.Arrow/ChunkedArray.cs
deleted file mode 100644
index 5f25acf..0000000
--- a/csharp/src/Apache.Arrow/ChunkedArray.cs
+++ /dev/null
@@ -1,91 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using Apache.Arrow;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// A data structure that manages a list of Arrow arrays logically as one large array
-    /// </summary>
-    public class ChunkedArray
-    {
-        private IList<Array> Arrays { get; }
-        public IArrowType DataType { get; }
-        public long Length { get; }
-        public long NullCount { get; }
-
-        public int ArrayCount
-        {
-            get => Arrays.Count;
-        }
-
-        public Array Array(int index) => Arrays[index];
-
-        public ChunkedArray(IList<Array> arrays)
-        {
-            Arrays = arrays ?? throw new ArgumentNullException(nameof(arrays));
-            if (arrays.Count < 1)
-            {
-                throw new ArgumentException($"Count must be at least 1. Got {arrays.Count} instead");
-            }
-            DataType = arrays[0].Data.DataType;
-            foreach (Array array in arrays)
-            {
-                Length += array.Length;
-                NullCount += array.NullCount;
-            }
-        }
-
-        public ChunkedArray(Array array) : this(new[] { array }) { }
-
-        public ChunkedArray Slice(long offset, long length)
-        {
-            if (offset >= Length)
-            {
-                throw new ArgumentException($"Index {offset} cannot be greater than the Column's Length {Length}");
-            }
-
-            int curArrayIndex = 0;
-            int numArrays = Arrays.Count;
-            // Use >= so a chunk is skipped entirely when the offset lands exactly on its end,
-            // avoiding an empty leading chunk in the result.
-            while (curArrayIndex < numArrays && offset >= Arrays[curArrayIndex].Length)
-            {
-                offset -= Arrays[curArrayIndex].Length;
-                curArrayIndex++;
-            }
-
-            IList<Array> newArrays = new List<Array>();
-            while (curArrayIndex < numArrays && length > 0)
-            {
-                newArrays.Add(Arrays[curArrayIndex].Slice((int)offset,
-                              length > Arrays[curArrayIndex].Length ? Arrays[curArrayIndex].Length : (int)length));
-                length -= Arrays[curArrayIndex].Length - offset;
-                offset = 0;
-                curArrayIndex++;
-            }
-            return new ChunkedArray(newArrays);
-        }
-
-        public ChunkedArray Slice(long offset)
-        {
-            return Slice(offset, Length - offset);
-        }
-
-        // TODO: Flatten for Structs
-    }
-}
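
A short illustration of the chunked abstraction removed above: several
physical arrays are exposed as one logical sequence, and Slice can cross a
chunk boundary. This sketch assumes the Int32Array builder from the same
package; the values are made up:

    Array chunk1 = new Int32Array.Builder().AppendRange(new[] { 1, 2, 3 }).Build();
    Array chunk2 = new Int32Array.Builder().AppendRange(new[] { 4, 5, 6 }).Build();
    var chunked = new ChunkedArray(new Array[] { chunk1, chunk2 });
    // chunked.Length == 6 even though the data lives in two chunks.
    ChunkedArray middle = chunked.Slice(2, 2); // values 3 and 4, across the boundary
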
diff --git a/csharp/src/Apache.Arrow/Column.cs b/csharp/src/Apache.Arrow/Column.cs
deleted file mode 100644
index 6e76e67..0000000
--- a/csharp/src/Apache.Arrow/Column.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// A Column data structure that logically represents a column in a dataset
-    /// </summary>
-    public class Column
-    {
-        public Field Field { get;  }
-        public ChunkedArray Data { get; }
-
-        public Column(Field field, IList<Array> arrays)
-        {
-            Data = new ChunkedArray(arrays);
-            Field = field;
-            if (!ValidateArrayDataTypes())
-            {
-                throw new ArgumentException($"{Field.DataType} must match {Data.DataType}");
-            }
-        }
-
-        private Column(Field field, ChunkedArray arrays)
-        {
-            Field = field;
-            Data = arrays;
-        }
-
-        public long Length => Data.Length;
-        public long NullCount => Data.NullCount;
-        public string Name => Field.Name;
-        public IArrowType Type => Field.DataType;
-
-        public Column Slice(int offset, int length)
-        {
-            return new Column(Field, Data.Slice(offset, length));
-        }
-
-        public Column Slice(int offset)
-        {
-            return new Column(Field, Data.Slice(offset));
-        }
-
-        private bool ValidateArrayDataTypes()
-        {
-            for (int i = 0; i < Data.ArrayCount; i++)
-            {
-                if (Data.Array(i).Data.DataType != Field.DataType)
-                {
-                    return false;
-                }
-            }
-            return true;
-        }
-    }
-}
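
Column pairs a Field with a ChunkedArray and validates that every chunk's
data type matches the field's. A hedged sketch, reusing the illustrative
chunks from the ChunkedArray example above:

    var field = new Field("n", Int32Type.Default, nullable: false);
    var column = new Column(field, new Array[] { chunk1, chunk2 });
    // column.Length == 6; a mismatched data type would throw ArgumentException.
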
diff --git a/csharp/src/Apache.Arrow/DecimalUtility.cs b/csharp/src/Apache.Arrow/DecimalUtility.cs
deleted file mode 100644
index b7ee6b9..0000000
--- a/csharp/src/Apache.Arrow/DecimalUtility.cs
+++ /dev/null
@@ -1,162 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Numerics;
-
-namespace Apache.Arrow
-{
-    /// <summary>
-    /// A semi-optimised, best-effort helper for converting between decimal values and the underlying Arrow buffers
-    /// </summary>
-    internal static class DecimalUtility
-    {
-        private static readonly BigInteger _maxDecimal = new BigInteger(decimal.MaxValue);
-        private static readonly BigInteger _minDecimal = new BigInteger(decimal.MinValue);
-        private static readonly ulong[] s_powersOfTen =
-        {
-            1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000, 10000000000, 100000000000,
-            1000000000000, 10000000000000, 100000000000000, 1000000000000000, 10000000000000000, 100000000000000000,
-            1000000000000000000, 10000000000000000000
-        };
-
-        private static int PowersOfTenLength => s_powersOfTen.Length - 1;
-
-        internal static decimal GetDecimal(in ArrowBuffer valueBuffer, int index, int scale, int byteWidth)
-        {
-            int startIndex = index * byteWidth;
-            ReadOnlySpan<byte> value = valueBuffer.Span.Slice(startIndex, byteWidth);
-            BigInteger integerValue;
-
-#if NETCOREAPP
-            integerValue = new BigInteger(value);
-#else
-            integerValue = new BigInteger(value.ToArray());
-#endif
-
-            if (integerValue > _maxDecimal || integerValue < _minDecimal)
-            {
-                BigInteger scaleBy = BigInteger.Pow(10, scale);
-                BigInteger integerPart = BigInteger.DivRem(integerValue, scaleBy, out BigInteger fractionalPart);
-                if (integerPart > _maxDecimal || integerPart < _minDecimal) // decimal overflow, not much we can do here - C# needs a BigDecimal
-                {
-                    throw new OverflowException($"Value: {integerPart} too big or too small to be represented as a decimal");
-                }
-                return (decimal)integerPart + DivideByScale(fractionalPart, scale);
-            }
-            else
-            {
-                return DivideByScale(integerValue, scale);
-            }
-        }
-
-        private static decimal DivideByScale(BigInteger integerValue, int scale)
-        {
-            decimal result = (decimal)integerValue; // this cast is safe here
-            int drop = scale;
-            while (drop > PowersOfTenLength)
-            {
-                result /= s_powersOfTen[PowersOfTenLength];
-                drop -= PowersOfTenLength;
-            }
-
-            result /= s_powersOfTen[drop];
-            return result;
-        }
-
-        internal static void GetBytes(decimal value, int precision, int scale, int byteWidth, Span<byte> bytes)
-        {
-            // create BigInteger from decimal
-            BigInteger bigInt;
-            int[] decimalBits = decimal.GetBits(value);
-            int decScale = (decimalBits[3] >> 16) & 0x7F;
-#if NETCOREAPP
-            // The 96-bit mantissa of a decimal is unsigned; interpret the bytes
-            // as unsigned so values with the high bit set do not come back negative.
-            Span<byte> bigIntBytes = stackalloc byte[12];
-
-            for (int i = 0; i < 3; i++)
-            {
-                int bit = decimalBits[i];
-                Span<byte> intBytes = stackalloc byte[4];
-                if (!BitConverter.TryWriteBytes(intBytes, bit))
-                    throw new OverflowException($"Could not extract bytes from int {bit}");
-
-                for (int j = 0; j < 4; j++)
-                {
-                    bigIntBytes[4 * i + j] = intBytes[j];
-                }
-            }
-            bigInt = new BigInteger(bigIntBytes, isUnsigned: true);
-#else
-            // On netstandard the extra trailing zero byte keeps the
-            // little-endian BigInteger non-negative.
-            byte[] bigIntBytes = new byte[13];
-            for (int i = 0; i < 3; i++)
-            {
-                int bit = decimalBits[i];
-                byte[] intBytes = BitConverter.GetBytes(bit);
-                for (int j = 0; j < intBytes.Length; j++)
-                {
-                    bigIntBytes[4 * i + j] = intBytes[j];
-                }
-            }
-            bigInt = new BigInteger(bigIntBytes);
-#endif
-
-            if (value < 0)
-            {
-                bigInt = -bigInt;
-            }
-
-            // validate precision and scale
-            if (decScale > scale)
-                throw new OverflowException($"Decimal scale cannot be greater than that in the Arrow vector: {decScale} != {scale}");
-
-            if (bigInt >= BigInteger.Pow(10, precision))
-                throw new OverflowException($"Decimal precision cannot be greater than that in the Arrow vector: {value} has precision > {precision}");
-
-            if (decScale < scale) // pad with trailing zeros
-            {
-                bigInt *= BigInteger.Pow(10, scale - decScale);
-            }
-
-            // extract bytes from BigInteger
-            if (bytes.Length != byteWidth)
-            {
-                throw new OverflowException($"ValueBuffer size not equal to {byteWidth} byte width: {bytes.Length}");
-            }
-
-            int bytesWritten;
-#if NETCOREAPP
-            if (!bigInt.TryWriteBytes(bytes, out bytesWritten, false, !BitConverter.IsLittleEndian))
-                throw new OverflowException("Could not extract bytes from integer value " + bigInt);
-#else
-            byte[] tempBytes = bigInt.ToByteArray();
-            tempBytes.CopyTo(bytes);
-            bytesWritten = tempBytes.Length;
-#endif
-
-            if (bytes.Length > byteWidth)
-            {
-                throw new OverflowException($"Decimal size greater than {byteWidth} bytes: {bytes.Length}");
-            }
-
-            if (bigInt.Sign == -1)
-            {
-                for (int i = bytesWritten; i < byteWidth; i++)
-                {
-                    bytes[i] = 255;
-                }
-            }
-        }
-    }
-}
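
The conversion above relies on the standard decimal layout: decimal.GetBits
returns the 96-bit unscaled value in elements 0..2 and packs the scale into
bits 16-23 of element 3. A tiny self-contained check (values illustrative):

    int[] bits = decimal.GetBits(123.45m);
    int scale = (bits[3] >> 16) & 0x7F; // == 2
    // The unscaled value in bits[0..2] is 12345, i.e. 123.45 == 12345 / 10^2.
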
diff --git a/csharp/src/Apache.Arrow/Extensions/ArrayDataExtensions.cs b/csharp/src/Apache.Arrow/Extensions/ArrayDataExtensions.cs
deleted file mode 100644
index 399d9bf..0000000
--- a/csharp/src/Apache.Arrow/Extensions/ArrayDataExtensions.cs
+++ /dev/null
@@ -1,45 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-
-namespace Apache.Arrow
-{
-    internal static class ArrayDataExtensions
-    {
-        public static void EnsureBufferCount(this ArrayData data, int count)
-        {
-            if (data.Buffers.Length != count)
-            {
-                // TODO: Use localizable string resource
-                throw new ArgumentException(
-                    $"Buffer count <{data.Buffers.Length}> must be at least <{count}>",
-                    nameof(data.Buffers.Length));
-            }
-        }
-
-        public static void EnsureDataType(this ArrayData data, ArrowTypeId id)
-        {
-            if (data.DataType.TypeId != id)
-            {
-                // TODO: Use localizable string resource
-                throw new ArgumentException(
-                    $"Specified array type <{data.DataType.TypeId}> does not match expected type(s) <{id}>",
-                    nameof(data.DataType.TypeId));
-            }
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Extensions/ArrayPoolExtensions.cs b/csharp/src/Apache.Arrow/Extensions/ArrayPoolExtensions.cs
deleted file mode 100644
index 9dd9589..0000000
--- a/csharp/src/Apache.Arrow/Extensions/ArrayPoolExtensions.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Buffers;
-using System.Runtime.CompilerServices;
-using System.Threading.Tasks;
-
-namespace Apache.Arrow
-{
-    internal static class ArrayPoolExtensions
-    {
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        public static void RentReturn(this ArrayPool<byte> pool, int length, Action<Memory<byte>> action)
-        {
-            byte[] array = null;
-
-            try
-            {
-                array = pool.Rent(length);
-                action(array.AsMemory(0, length));
-            }
-            finally
-            {
-                if (array != null)
-                {
-                    pool.Return(array);
-                }
-            }
-        }
-
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        public static async ValueTask RentReturnAsync(this ArrayPool<byte> pool, int length, Func<Memory<byte>, ValueTask> action)
-        {
-            byte[] array = null;
-
-            try
-            {
-                array = pool.Rent(length);
-                // Await the action so the buffer is not returned to the pool
-                // while the asynchronous work may still be using it.
-                await action(array.AsMemory(0, length)).ConfigureAwait(false);
-            }
-            finally
-            {
-                if (array != null)
-                {
-                    pool.Return(array);
-                }
-            }
-        }
-    }
-}
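
These internal helpers wrap the rent/return pattern so callers only ever see
an exactly-sized Memory<byte>. A usage sketch (the lambda body is
illustrative):

    ArrayPool<byte>.Shared.RentReturn(256, memory =>
    {
        memory.Span.Fill(0xFF); // the pooled buffer is returned automatically
    });
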
diff --git a/csharp/src/Apache.Arrow/Extensions/ArrowTypeExtensions.cs b/csharp/src/Apache.Arrow/Extensions/ArrowTypeExtensions.cs
deleted file mode 100644
index 5b04074..0000000
--- a/csharp/src/Apache.Arrow/Extensions/ArrowTypeExtensions.cs
+++ /dev/null
@@ -1,42 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System.Collections.Generic;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public static class ArrowTypeExtensions
-    {
-        private static readonly ISet<ArrowTypeId> s_integralTypes = 
-            new HashSet<ArrowTypeId>(new[]
-            {
-                ArrowTypeId.Int8, ArrowTypeId.Int16, ArrowTypeId.Int32, ArrowTypeId.Int64,
-                ArrowTypeId.UInt8, ArrowTypeId.UInt16, ArrowTypeId.UInt32, ArrowTypeId.UInt64,
-            });
-
-        private static readonly ISet<ArrowTypeId> s_floatingPointTypes =
-            new HashSet<ArrowTypeId>(new[]
-            {
-                ArrowTypeId.HalfFloat, ArrowTypeId.Float, ArrowTypeId.Double
-            });
-
-        public static bool IsIntegral(this IArrowType type) 
-            => s_integralTypes.Contains(type.TypeId);
-
-        public static bool IsFloatingPoint(this IArrowType type)
-            => s_floatingPointTypes.Contains(type.TypeId);
-    }
-}
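
Usage is straightforward; for instance (types taken from Apache.Arrow.Types):

    bool a = Int32Type.Default.IsIntegral();       // true
    bool b = DoubleType.Default.IsFloatingPoint(); // true
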
diff --git a/csharp/src/Apache.Arrow/Extensions/FlatbufExtensions.cs b/csharp/src/Apache.Arrow/Extensions/FlatbufExtensions.cs
deleted file mode 100644
index d2a70bc..0000000
--- a/csharp/src/Apache.Arrow/Extensions/FlatbufExtensions.cs
+++ /dev/null
@@ -1,85 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-
-namespace Apache.Arrow
-{
-    internal static class FlatbufExtensions
-    {
-        public static bool IsFixedPrimitive(this Flatbuf.Type t)
-        {
-            if (t == Flatbuf.Type.Utf8 || t == Flatbuf.Type.Binary)
-                return false;
-            return true;
-        }
-
-        public static bool IsFixedPrimitive(this Types.IArrowType t)
-        {
-            return t.TypeId.IsFixedPrimitive();
-        }
-
-        public static bool IsFixedPrimitive(this Types.ArrowTypeId t)
-        {
-            if (t == Types.ArrowTypeId.String || t == Types.ArrowTypeId.Binary)
-                return false;
-            return true;
-        }
-
-        public static Types.IntervalUnit ToArrow(this Flatbuf.IntervalUnit unit)
-        {
-            switch (unit)
-            {
-                case Flatbuf.IntervalUnit.DAY_TIME:
-                    return Types.IntervalUnit.DayTime;
-                case Flatbuf.IntervalUnit.YEAR_MONTH:
-                    return Types.IntervalUnit.YearMonth;
-                default:
-                    throw new ArgumentException($"Unexpected Flatbuf IntervalUnit", nameof(unit));
-            }
-        }
-
-        public static Types.DateUnit ToArrow(this Flatbuf.DateUnit unit)
-        {
-            switch (unit)
-            {
-                case Flatbuf.DateUnit.DAY:
-                    return Types.DateUnit.Day;
-                case Flatbuf.DateUnit.MILLISECOND:
-                    return Types.DateUnit.Milliseconds;
-                default:
-                    throw new ArgumentException($"Unexpected Flatbuf IntervalUnit", nameof(unit));
-            }
-        }
-
-        public static Types.TimeUnit ToArrow(this Flatbuf.TimeUnit unit)
-        {
-            switch (unit)
-            {
-                case Flatbuf.TimeUnit.MICROSECOND:
-                    return Types.TimeUnit.Microsecond;
-                case Flatbuf.TimeUnit.MILLISECOND:
-                    return Types.TimeUnit.Millisecond;
-                case Flatbuf.TimeUnit.NANOSECOND:
-                    return Types.TimeUnit.Nanosecond;
-                case Flatbuf.TimeUnit.SECOND:
-                    return Types.TimeUnit.Second;
-                default:
-                    throw new ArgumentException($"Unexpected Flatbuf TimeUnit", nameof(unit));
-            }
-        }
-    }
-}
-
diff --git a/csharp/src/Apache.Arrow/Extensions/SpanExtensions.cs b/csharp/src/Apache.Arrow/Extensions/SpanExtensions.cs
deleted file mode 100644
index b759f38..0000000
--- a/csharp/src/Apache.Arrow/Extensions/SpanExtensions.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Runtime.InteropServices;
-
-namespace Apache.Arrow
-{
-    public static class SpanExtensions
-    {
-        public static Span<T> CastTo<T>(this Span<byte> span)
-            where T: struct =>
-            MemoryMarshal.Cast<byte, T>(span);
-
-        public static ReadOnlySpan<T> CastTo<T>(this ReadOnlySpan<byte> span)
-            where T: struct =>
-                MemoryMarshal.Cast<byte, T>(span);
-    }
-}
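
CastTo is a thin wrapper over MemoryMarshal.Cast that reinterprets a byte
span as a span of structs without copying. A small sketch:

    Span<byte> raw = stackalloc byte[8];
    Span<int> ints = raw.CastTo<int>(); // two 32-bit views over the same bytes
    ints[0] = 42; // writes through to raw[0..3]
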
diff --git a/csharp/src/Apache.Arrow/Extensions/StreamExtensions.cs b/csharp/src/Apache.Arrow/Extensions/StreamExtensions.cs
deleted file mode 100644
index 1767d23..0000000
--- a/csharp/src/Apache.Arrow/Extensions/StreamExtensions.cs
+++ /dev/null
@@ -1,70 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.IO;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Apache.Arrow
-{
-    internal static partial class StreamExtensions
-    {
-        public static async ValueTask<int> ReadFullBufferAsync(this Stream stream, Memory<byte> buffer, CancellationToken cancellationToken = default)
-        {
-            int totalBytesRead = 0;
-            do
-            {
-                int bytesRead = 
-                    await stream.ReadAsync(
-                        buffer.Slice(totalBytesRead, buffer.Length - totalBytesRead),
-                        cancellationToken)
-                    .ConfigureAwait(false);
-
-                if (bytesRead == 0)
-                {
-                    // reached the end of the stream
-                    return totalBytesRead;
-                }
-
-                totalBytesRead += bytesRead;
-            }
-            while (totalBytesRead < buffer.Length);
-
-            return totalBytesRead;
-        }
-
-        public static int ReadFullBuffer(this Stream stream, Memory<byte> buffer)
-        {
-            int totalBytesRead = 0;
-            do
-            {
-                int bytesRead = stream.Read(
-                    buffer.Slice(totalBytesRead, buffer.Length - totalBytesRead));
-
-                if (bytesRead == 0)
-                {
-                    // reached the end of the stream
-                    return totalBytesRead;
-                }
-
-                totalBytesRead += bytesRead;
-            }
-            while (totalBytesRead < buffer.Length);
-
-            return totalBytesRead;
-        }
-    }
-}
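
ReadFullBuffer loops because Stream.Read may return fewer bytes than asked
for; only a zero return signals end of stream. A usage sketch, where
"stream" is assumed to be any readable Stream you already have:

    var buffer = new byte[16];
    int read = stream.ReadFullBuffer(buffer);
    if (read < buffer.Length)
    {
        // The stream ended early; only the first 'read' bytes are valid.
    }
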
diff --git a/csharp/src/Apache.Arrow/Extensions/StreamExtensions.netcoreapp2.1.cs b/csharp/src/Apache.Arrow/Extensions/StreamExtensions.netcoreapp2.1.cs
deleted file mode 100644
index efcacdc..0000000
--- a/csharp/src/Apache.Arrow/Extensions/StreamExtensions.netcoreapp2.1.cs
+++ /dev/null
@@ -1,34 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.IO;
-
-namespace Apache.Arrow
-{
-    // Helpers to read from Stream to Memory<byte> on netcoreapp
-    internal static partial class StreamExtensions
-    {
-        public static int Read(this Stream stream, Memory<byte> buffer)
-        {
-            return stream.Read(buffer.Span);
-        }
-
-        public static void Write(this Stream stream, ReadOnlyMemory<byte> buffer)
-        {
-            stream.Write(buffer.Span);
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Extensions/StreamExtensions.netstandard.cs b/csharp/src/Apache.Arrow/Extensions/StreamExtensions.netstandard.cs
deleted file mode 100644
index b983be0..0000000
--- a/csharp/src/Apache.Arrow/Extensions/StreamExtensions.netstandard.cs
+++ /dev/null
@@ -1,124 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Buffers;
-using System.IO;
-using System.Runtime.InteropServices;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Apache.Arrow
-{
-    // Helpers to write Memory<byte> to Stream on netstandard
-    internal static partial class StreamExtensions
-    {
-        public static int Read(this Stream stream, Memory<byte> buffer)
-        {
-            if (MemoryMarshal.TryGetArray(buffer, out ArraySegment<byte> array))
-            {
-                return stream.Read(array.Array, array.Offset, array.Count);
-            }
-            else
-            {
-                byte[] sharedBuffer = ArrayPool<byte>.Shared.Rent(buffer.Length);
-                try
-                {
-                    int result = stream.Read(sharedBuffer, 0, buffer.Length);
-                    new Span<byte>(sharedBuffer, 0, result).CopyTo(buffer.Span);
-                    return result;
-                }
-                finally
-                {
-                    ArrayPool<byte>.Shared.Return(sharedBuffer);
-                }
-            }
-        }
-
-        public static ValueTask<int> ReadAsync(this Stream stream, Memory<byte> buffer, CancellationToken cancellationToken = default)
-        {
-            if (MemoryMarshal.TryGetArray(buffer, out ArraySegment<byte> array))
-            {
-                return new ValueTask<int>(stream.ReadAsync(array.Array, array.Offset, array.Count, cancellationToken));
-            }
-            else
-            {
-                byte[] sharedBuffer = ArrayPool<byte>.Shared.Rent(buffer.Length);
-                return FinishReadAsync(stream.ReadAsync(sharedBuffer, 0, buffer.Length, cancellationToken), sharedBuffer, buffer);
-
-                async ValueTask<int> FinishReadAsync(Task<int> readTask, byte[] localBuffer, Memory<byte> localDestination)
-                {
-                    try
-                    {
-                        int result = await readTask.ConfigureAwait(false);
-                        new Span<byte>(localBuffer, 0, result).CopyTo(localDestination.Span);
-                        return result;
-                    }
-                    finally
-                    {
-                        ArrayPool<byte>.Shared.Return(localBuffer);
-                    }
-                }
-            }
-        }
-
-        public static void Write(this Stream stream, ReadOnlyMemory<byte> buffer)
-        {
-            if (MemoryMarshal.TryGetArray(buffer, out ArraySegment<byte> array))
-            {
-                stream.Write(array.Array, array.Offset, array.Count);
-            }
-            else
-            {
-                byte[] sharedBuffer = ArrayPool<byte>.Shared.Rent(buffer.Length);
-                try
-                {
-                    buffer.Span.CopyTo(sharedBuffer);
-                    stream.Write(sharedBuffer, 0, buffer.Length);
-                }
-                finally
-                {
-                    ArrayPool<byte>.Shared.Return(sharedBuffer);
-                }
-            }
-        }
-
-        public static ValueTask WriteAsync(this Stream stream, ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
-        {
-            if (MemoryMarshal.TryGetArray(buffer, out ArraySegment<byte> array))
-            {
-                return new ValueTask(stream.WriteAsync(array.Array, array.Offset, array.Count, cancellationToken));
-            }
-            else
-            {
-                byte[] sharedBuffer = ArrayPool<byte>.Shared.Rent(buffer.Length);
-                buffer.Span.CopyTo(sharedBuffer);
-                return FinishWriteAsync(stream.WriteAsync(sharedBuffer, 0, buffer.Length, cancellationToken), sharedBuffer);
-            }
-        }
-
-        private static async ValueTask FinishWriteAsync(Task writeTask, byte[] localBuffer)
-        {
-            try
-            {
-                await writeTask.ConfigureAwait(false);
-            }
-            finally
-            {
-                ArrayPool<byte>.Shared.Return(localBuffer);
-            }
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Extensions/TimeSpanExtensions.cs b/csharp/src/Apache.Arrow/Extensions/TimeSpanExtensions.cs
deleted file mode 100644
index 133156d..0000000
--- a/csharp/src/Apache.Arrow/Extensions/TimeSpanExtensions.cs
+++ /dev/null
@@ -1,35 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-
-namespace Apache.Arrow
-{
-    public static class TimeSpanExtensions
-    {
-        /// <summary>
-        /// Formats a TimeSpan into an ISO 8601 compliant time offset string.
-        /// </summary>
-        /// <param name="timeSpan">timeSpan to format</param>
-        /// <returns>ISO 8601 offset string</returns>
-        public static string ToTimeZoneOffsetString(this TimeSpan timeSpan)
-        {
-            // Base the sign on the whole TimeSpan rather than the Hours
-            // component so offsets such as -00:30 are formatted correctly.
-            string sign = timeSpan < TimeSpan.Zero ? "-" : "+";
-            int hours = Math.Abs(timeSpan.Hours);
-            int minutes = Math.Abs(timeSpan.Minutes);
-            return sign + hours.ToString("00") + ":" + minutes.ToString("00");
-        }
-    }
-}
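
Example outputs, assuming the sign is taken from the whole TimeSpan as in
the method above:

    TimeSpan.FromHours(5.5).ToTimeZoneOffsetString();   // "+05:30"
    TimeSpan.FromMinutes(-30).ToTimeZoneOffsetString(); // "-00:30"
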
diff --git a/csharp/src/Apache.Arrow/Extensions/TupleExtensions.netstandard.cs b/csharp/src/Apache.Arrow/Extensions/TupleExtensions.netstandard.cs
deleted file mode 100644
index fe42075..0000000
--- a/csharp/src/Apache.Arrow/Extensions/TupleExtensions.netstandard.cs
+++ /dev/null
@@ -1,29 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-
-namespace Apache.Arrow
-{
-    // Helpers to Deconstruct Tuples on netstandard
-    internal static partial class TupleExtensions
-    {
-        public static void Deconstruct<T1, T2>(this Tuple<T1, T2> value, out T1 item1, out T2 item2)
-        {
-            item1 = value.Item1;
-            item2 = value.Item2;
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Field.Builder.cs b/csharp/src/Apache.Arrow/Field.Builder.cs
deleted file mode 100644
index 1e7aa19..0000000
--- a/csharp/src/Apache.Arrow/Field.Builder.cs
+++ /dev/null
@@ -1,93 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using Apache.Arrow.Types;
-using System;
-using System.Collections.Generic;
-
-namespace Apache.Arrow
-{
-    public partial class Field
-    {
-        public class Builder
-        {
-            private Dictionary<string, string> _metadata;
-            private string _name;
-            private IArrowType _type;
-            private bool _nullable;
-
-            public Builder()
-            {
-                _name = string.Empty;
-                _type = NullType.Default;
-                _nullable = true;
-            }
-
-            public Builder Name(string value)
-            {
-                if (string.IsNullOrWhiteSpace(value))
-                {
-                    throw new ArgumentNullException(nameof(value));
-                }
-
-                _name = value;
-                return this;
-            }
-
-            public Builder DataType(IArrowType type)
-            {
-                _type = type ?? NullType.Default;
-                return this;
-            }
-
-            public Builder Nullable(bool value)
-            {
-                _nullable = value;
-                return this;
-            }
-
-            public Builder Metadata(string key, string value)
-            {
-                if (string.IsNullOrWhiteSpace(key))
-                {
-                    throw new ArgumentNullException(nameof(key));
-                }
-
-                _metadata ??= new Dictionary<string, string>();
-
-                _metadata[key] = value;
-                return this;
-            }
-
-            public Builder Metadata(IEnumerable<KeyValuePair<string, string>> dictionary)
-            {
-                if (dictionary == null)
-                {
-                    throw new ArgumentNullException(nameof(dictionary));
-                }
-                foreach (KeyValuePair<string, string> entry in dictionary)
-                {
-                    Metadata(entry.Key, entry.Value);
-                }
-                return this;
-            }
-
-            public Field Build()
-            {
-                return new Field(_name, _type, _nullable, _metadata);
-            }
-        }
-    }
-}
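
The builder gives a fluent way to assemble a Field; the name and metadata
below are illustrative:

    Field f = new Field.Builder()
        .Name("price")
        .DataType(Int32Type.Default)
        .Nullable(false)
        .Metadata("unit", "cents")
        .Build();
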
diff --git a/csharp/src/Apache.Arrow/Field.cs b/csharp/src/Apache.Arrow/Field.cs
deleted file mode 100644
index 6e507b6..0000000
--- a/csharp/src/Apache.Arrow/Field.cs
+++ /dev/null
@@ -1,65 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one or more
-// contributor license agreements. See the NOTICE file distributed with
-// this work for additional information regarding copyright ownership.
-// The ASF licenses this file to You under the Apache License, Version 2.0
-// (the "License"); you may not use this file except in compliance with
-// the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Linq;
-using Apache.Arrow.Types;
-
-namespace Apache.Arrow
-{
-    public partial class Field
-    {
-        public IArrowType DataType { get; }
-
-        public string Name { get; }
-
-        public bool IsNullable { get; }
-
-        public bool HasMetadata => Metadata?.Count > 0;
-
-        public IReadOnlyDictionary<string, string> Metadata { get; }
-
-        public Field(string name, IArrowType dataType, bool nullable,
-            IEnumerable<KeyValuePair<string, string>> metadata = default)
-            : this(name, dataType, nullable)
-        {
-            Metadata = metadata?.ToDictionary(kv => kv.Key, kv => kv.Value);
-
-        }
-
-        internal Field(string name, IArrowType dataType, bool nullable,
-            IReadOnlyDictionary<string, string> metadata, bool copyCollections)
-            : this(name, dataType, nullable)
-        {
-            Debug.Assert(copyCollections == false, "This internal constructor must not copy the collections.");
-
-            Metadata = metadata;
-        }
-
-        private Field(string name, IArrowType dataType, bool nullable)
-        {
-            if (string.IsNullOrWhiteSpace(name))
-            {
-                throw new ArgumentNullException(nameof(name));
-            }
-
-            Name = name;
-            DataType = dataType ?? NullType.Default;
-            IsNullable = nullable;
-        }
-    }
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Block.cs b/csharp/src/Apache.Arrow/Flatbuf/Block.cs
deleted file mode 100644
index 89c065b..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Block.cs
+++ /dev/null
@@ -1,37 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-internal struct Block : IFlatbufferObject
-{
-  private Struct __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public Block __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// Index to the start of the RecordBlock (note this is past the Message header)
-  public long Offset { get { return __p.bb.GetLong(__p.bb_pos + 0); } }
-  /// Length of the metadata
-  public int MetaDataLength { get { return __p.bb.GetInt(__p.bb_pos + 8); } }
-  /// Length of the data (this is aligned so there can be a gap between this and
-  /// the metadata).
-  public long BodyLength { get { return __p.bb.GetLong(__p.bb_pos + 16); } }
-
-  public static Offset<Block> CreateBlock(FlatBufferBuilder builder, long Offset, int MetaDataLength, long BodyLength) {
-    builder.Prep(8, 24);
-    builder.PutLong(BodyLength);
-    builder.Pad(4);
-    builder.PutInt(MetaDataLength);
-    builder.PutLong(Offset);
-    return new Offset<Block>(builder.Offset);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/BodyCompression.cs b/csharp/src/Apache.Arrow/Flatbuf/BodyCompression.cs
deleted file mode 100644
index dda0dd4..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/BodyCompression.cs
+++ /dev/null
@@ -1,47 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-/// Optional compression for the memory buffers constituting IPC message
-/// bodies. Intended for use with RecordBatch but could be used for other
-/// message types
-internal struct BodyCompression : IFlatbufferObject
-{
-  private Table __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public static BodyCompression GetRootAsBodyCompression(ByteBuffer _bb) { return GetRootAsBodyCompression(_bb, new BodyCompression()); }
-  public static BodyCompression GetRootAsBodyCompression(ByteBuffer _bb, BodyCompression obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public BodyCompression __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// Compressor library
-  public CompressionType Codec { get { int o = __p.__offset(4); return o != 0 ? (CompressionType)__p.bb.GetSbyte(o + __p.bb_pos) : CompressionType.LZ4_FRAME; } }
-  /// Indicates the way the record batch body was compressed
-  public BodyCompressionMethod Method { get { int o = __p.__offset(6); return o != 0 ? (BodyCompressionMethod)__p.bb.GetSbyte(o + __p.bb_pos) : BodyCompressionMethod.BUFFER; } }
-
-  public static Offset<BodyCompression> CreateBodyCompression(FlatBufferBuilder builder,
-      CompressionType codec = CompressionType.LZ4_FRAME,
-      BodyCompressionMethod method = BodyCompressionMethod.BUFFER) {
-    builder.StartObject(2);
-    BodyCompression.AddMethod(builder, method);
-    BodyCompression.AddCodec(builder, codec);
-    return BodyCompression.EndBodyCompression(builder);
-  }
-
-  public static void StartBodyCompression(FlatBufferBuilder builder) { builder.StartObject(2); }
-  public static void AddCodec(FlatBufferBuilder builder, CompressionType codec) { builder.AddSbyte(0, (sbyte)codec, 0); }
-  public static void AddMethod(FlatBufferBuilder builder, BodyCompressionMethod method) { builder.AddSbyte(1, (sbyte)method, 0); }
-  public static Offset<BodyCompression> EndBodyCompression(FlatBufferBuilder builder) {
-    int o = builder.EndObject();
-    return new Offset<BodyCompression>(o);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Buffer.cs b/csharp/src/Apache.Arrow/Flatbuf/Buffer.cs
deleted file mode 100644
index 7b2315c..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Buffer.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-/// ----------------------------------------------------------------------
-/// A Buffer represents a single contiguous memory segment
-internal struct Buffer : IFlatbufferObject
-{
-  private Struct __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public Buffer __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// The relative offset into the shared memory page where the bytes for this
-  /// buffer start
-  public long Offset { get { return __p.bb.GetLong(__p.bb_pos + 0); } }
-  /// The absolute length (in bytes) of the memory buffer. The memory is found
-  /// from offset (inclusive) to offset + length (non-inclusive).
-  public long Length { get { return __p.bb.GetLong(__p.bb_pos + 8); } }
-
-  public static Offset<Buffer> CreateBuffer(FlatBufferBuilder builder, long Offset, long Length) {
-    builder.Prep(8, 16);
-    builder.PutLong(Length);
-    builder.PutLong(Offset);
-    return new Offset<Buffer>(builder.Offset);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/DictionaryBatch.cs b/csharp/src/Apache.Arrow/Flatbuf/DictionaryBatch.cs
deleted file mode 100644
index e3afafd..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/DictionaryBatch.cs
+++ /dev/null
@@ -1,54 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-/// For sending dictionary encoding information. Any Field can be
-/// dictionary-encoded, but in this case none of its children may be
-/// dictionary-encoded.
-/// There is one vector / column per dictionary, but that vector / column
-/// may be spread across multiple dictionary batches by using the isDelta
-/// flag
-internal struct DictionaryBatch : IFlatbufferObject
-{
-  private Table __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public static DictionaryBatch GetRootAsDictionaryBatch(ByteBuffer _bb) { return GetRootAsDictionaryBatch(_bb, new DictionaryBatch()); }
-  public static DictionaryBatch GetRootAsDictionaryBatch(ByteBuffer _bb, DictionaryBatch obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public DictionaryBatch __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  public long Id { get { int o = __p.__offset(4); return o != 0 ? __p.bb.GetLong(o + __p.bb_pos) : (long)0; } }
-  public RecordBatch? Data { get { int o = __p.__offset(6); return o != 0 ? (RecordBatch?)(new RecordBatch()).__assign(__p.__indirect(o + __p.bb_pos), __p.bb) : null; } }
-  /// If isDelta is true the values in the dictionary are to be appended to a
-  /// dictionary with the indicated id
-  public bool IsDelta { get { int o = __p.__offset(8); return o != 0 ? 0!=__p.bb.Get(o + __p.bb_pos) : (bool)false; } }
-
-  public static Offset<DictionaryBatch> CreateDictionaryBatch(FlatBufferBuilder builder,
-      long id = 0,
-      Offset<RecordBatch> dataOffset = default(Offset<RecordBatch>),
-      bool isDelta = false) {
-    builder.StartObject(3);
-    DictionaryBatch.AddId(builder, id);
-    DictionaryBatch.AddData(builder, dataOffset);
-    DictionaryBatch.AddIsDelta(builder, isDelta);
-    return DictionaryBatch.EndDictionaryBatch(builder);
-  }
-
-  public static void StartDictionaryBatch(FlatBufferBuilder builder) { builder.StartObject(3); }
-  public static void AddId(FlatBufferBuilder builder, long id) { builder.AddLong(0, id, 0); }
-  public static void AddData(FlatBufferBuilder builder, Offset<RecordBatch> dataOffset) { builder.AddOffset(1, dataOffset.Value, 0); }
-  public static void AddIsDelta(FlatBufferBuilder builder, bool isDelta) { builder.AddBool(2, isDelta, false); }
-  public static Offset<DictionaryBatch> EndDictionaryBatch(FlatBufferBuilder builder) {
-    int o = builder.EndObject();
-    return new Offset<DictionaryBatch>(o);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/DictionaryEncoding.cs b/csharp/src/Apache.Arrow/Flatbuf/DictionaryEncoding.cs
deleted file mode 100644
index 02a35fd..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/DictionaryEncoding.cs
+++ /dev/null
@@ -1,57 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-/// ----------------------------------------------------------------------
-/// Dictionary encoding metadata
-internal struct DictionaryEncoding : IFlatbufferObject
-{
-  private Table __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public static DictionaryEncoding GetRootAsDictionaryEncoding(ByteBuffer _bb) { return GetRootAsDictionaryEncoding(_bb, new DictionaryEncoding()); }
-  public static DictionaryEncoding GetRootAsDictionaryEncoding(ByteBuffer _bb, DictionaryEncoding obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public DictionaryEncoding __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// The known dictionary id in the application where this data is used. In
-  /// the file or streaming formats, the dictionary ids are found in the
-  /// DictionaryBatch messages
-  public long Id { get { int o = __p.__offset(4); return o != 0 ? __p.bb.GetLong(o + __p.bb_pos) : (long)0; } }
-  /// The dictionary indices are constrained to be positive integers. If this
-  /// field is null, the indices must be signed int32
-  public Int? IndexType { get { int o = __p.__offset(6); return o != 0 ? (Int?)(new Int()).__assign(__p.__indirect(o + __p.bb_pos), __p.bb) : null; } }
-  /// By default, dictionaries are not ordered, or the order does not have
-  /// semantic meaning. In some statistical applications, dictionary encoding
-  /// is used to represent ordered categorical data, and we provide a way to
-  /// preserve that metadata here
-  public bool IsOrdered { get { int o = __p.__offset(8); return o != 0 ? 0!=__p.bb.Get(o + __p.bb_pos) : (bool)false; } }
-
-  public static Offset<DictionaryEncoding> CreateDictionaryEncoding(FlatBufferBuilder builder,
-      long id = 0,
-      Offset<Int> indexTypeOffset = default(Offset<Int>),
-      bool isOrdered = false) {
-    builder.StartObject(3);
-    DictionaryEncoding.AddId(builder, id);
-    DictionaryEncoding.AddIndexType(builder, indexTypeOffset);
-    DictionaryEncoding.AddIsOrdered(builder, isOrdered);
-    return DictionaryEncoding.EndDictionaryEncoding(builder);
-  }
-
-  public static void StartDictionaryEncoding(FlatBufferBuilder builder) { builder.StartObject(3); }
-  public static void AddId(FlatBufferBuilder builder, long id) { builder.AddLong(0, id, 0); }
-  public static void AddIndexType(FlatBufferBuilder builder, Offset<Int> indexTypeOffset) { builder.AddOffset(1, indexTypeOffset.Value, 0); }
-  public static void AddIsOrdered(FlatBufferBuilder builder, bool isOrdered) { builder.AddBool(2, isOrdered, false); }
-  public static Offset<DictionaryEncoding> EndDictionaryEncoding(FlatBufferBuilder builder) {
-    int o = builder.EndObject();
-    return new Offset<DictionaryEncoding>(o);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/BodyCompressionMethod.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/BodyCompressionMethod.cs
deleted file mode 100644
index e9f6b6e..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/BodyCompressionMethod.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-/// Provided for forward compatibility in case we need to support different
-/// strategies for compressing the IPC message body (like whole-body
-/// compression rather than buffer-level) in the future
-internal enum BodyCompressionMethod : sbyte
-{
-  /// Each constituent buffer is first compressed with the indicated
-  /// compressor, and then written with the uncompressed length in the first 8
-  /// bytes as a 64-bit little-endian signed integer followed by the compressed
-  /// buffer bytes (and then padding as required by the protocol). The
-  /// uncompressed length may be set to -1 to indicate that the data that
-  /// follows is not compressed, which can be useful for cases where
-  /// compression does not yield appreciable savings.
- BUFFER = 0,
-};
-
-
-}
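
A hedged sketch of the BUFFER framing the comment above describes. The
helper below is illustrative only (not part of the removed sources, and it
assumes `using System;`): the uncompressed length is written first as a
64-bit little-endian signed integer, with -1 signalling an uncompressed
body, followed by the (possibly compressed) bytes.

    static byte[] FrameBuffer(byte[] raw, Func<byte[], byte[]> compress)
    {
        byte[] compressed = compress(raw);              // e.g. an LZ4-frame or ZSTD codec
        bool keepRaw = compressed.Length >= raw.Length; // no appreciable savings
        long prefix = keepRaw ? -1L : raw.Length;       // uncompressed length, or -1
        byte[] body = keepRaw ? raw : compressed;
        byte[] framed = new byte[8 + body.Length];      // protocol padding omitted
        BitConverter.GetBytes(prefix).CopyTo(framed, 0); // little-endian on LE hosts
        body.CopyTo(framed, 8);
        return framed;
    }
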
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/CompressionType.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/CompressionType.cs
deleted file mode 100644
index 3d886c5..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/CompressionType.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum CompressionType : sbyte
-{
- LZ4_FRAME = 0,
- ZSTD = 1,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/DateUnit.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/DateUnit.cs
deleted file mode 100644
index 46fd0cc..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/DateUnit.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum DateUnit : short
-{
- DAY = 0,
- MILLISECOND = 1,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/Endianness.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/Endianness.cs
deleted file mode 100644
index a0e64f4..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/Endianness.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-/// ----------------------------------------------------------------------
-/// Endianness of the platform producing the data
-internal enum Endianness : short
-{
- Little = 0,
- Big = 1,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/Feature.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/Feature.cs
deleted file mode 100644
index a05b6cf..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/Feature.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-/// Represents Arrow Features that might not have full support
-/// within implementations. This is intended to be used in
-/// two scenarios:
-///  1.  A mechanism for readers of Arrow Streams
-///      and files to understand that the stream or file makes
-///      use of a feature that is unsupported by or unknown to
-///      the implementation (and therefore can meet the Arrow
-///      forward compatibility guarantees).
-///  2.  A means of negotiating between a client and server
-///      what features a stream is allowed to use. The enum
-///      values here are intended to represent higher level
-///      features; additional details may be negotiated
-///      with key-value pairs specific to the protocol.
-///
-/// Enums added to this list should be assigned power-of-two values
-/// to facilitate exchanging and comparing bitmaps for supported
-/// features.
-internal enum Feature : long
-{
-  /// Needed to make flatbuffers happy.
- UNUSED = 0,
-  /// The stream makes use of multiple full dictionaries with the
-  /// same ID and assumes clients implement dictionary replacement
-  /// correctly.
- DICTIONARY_REPLACEMENT = 1,
-  /// The stream makes use of compressed bodies as described
-  /// in Message.fbs.
- COMPRESSED_BODY = 2,
-};
-
-
-}
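
Because the values are powers of two, a reader's supported features fold
into a single bitmap and can be tested with a mask, for example:

    long supported = (long)Feature.DICTIONARY_REPLACEMENT | (long)Feature.COMPRESSED_BODY;
    bool canReadCompressedBody = (supported & (long)Feature.COMPRESSED_BODY) != 0;
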
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/IntervalUnit.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/IntervalUnit.cs
deleted file mode 100644
index d136396..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/IntervalUnit.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum IntervalUnit : short
-{
- YEAR_MONTH = 0,
- DAY_TIME = 1,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/MessageHeader.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/MessageHeader.cs
deleted file mode 100644
index 94d239b..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/MessageHeader.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-/// ----------------------------------------------------------------------
-/// The root Message type
-/// This union enables us to easily send different message types without
-/// redundant storage, and in the future we can easily add new message types.
-///
-/// Arrow implementations do not need to implement all of the message types,
-/// which may include experimental metadata types. For maximum compatibility,
-/// it is best to send data using RecordBatch
-internal enum MessageHeader : byte
-{
- NONE = 0,
- Schema = 1,
- DictionaryBatch = 2,
- RecordBatch = 3,
- Tensor = 4,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/MetadataVersion.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/MetadataVersion.cs
deleted file mode 100644
index 1e893e8..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/MetadataVersion.cs
+++ /dev/null
@@ -1,29 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum MetadataVersion : short
-{
-  /// 0.1.0 (October 2016).
- V1 = 0,
-  /// 0.2.0 (February 2017). Non-backwards compatible with V1.
- V2 = 1,
-  /// 0.3.0 -> 0.7.1 (May - December 2017). Non-backwards compatible with V2.
- V3 = 2,
-  /// >= 0.8.0 (December 2017). Non-backwards compatible with V3.
- V4 = 3,
-  /// >= 1.0.0 (July 2020). Backwards compatible with V4 (V5 readers can read V4
-  /// metadata and IPC messages). Implementations are recommended to provide a
-  /// V4 compatibility mode with V5 format changes disabled.
-  ///
-  /// Incompatible changes between V4 and V5:
-  /// - Union buffer layout has changed. In V5, Unions don't have a validity
-  ///   bitmap buffer.
- V5 = 4,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/Precision.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/Precision.cs
deleted file mode 100644
index 3f47a2c..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/Precision.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum Precision : short
-{
- HALF = 0,
- SINGLE = 1,
- DOUBLE = 2,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/TimeUnit.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/TimeUnit.cs
deleted file mode 100644
index 300b835..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/TimeUnit.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum TimeUnit : short
-{
- SECOND = 0,
- MILLISECOND = 1,
- MICROSECOND = 2,
- NANOSECOND = 3,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/Type.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/Type.cs
deleted file mode 100644
index e8a7932..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/Type.cs
+++ /dev/null
@@ -1,38 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-/// ----------------------------------------------------------------------
-/// Top-level Type value, enabling extensible type-specific metadata. We can
-/// add new logical types to Type without breaking backwards compatibility
-internal enum Type : byte
-{
- NONE = 0,
- Null = 1,
- Int = 2,
- FloatingPoint = 3,
- Binary = 4,
- Utf8 = 5,
- Bool = 6,
- Decimal = 7,
- Date = 8,
- Time = 9,
- Timestamp = 10,
- Interval = 11,
- List = 12,
- Struct_ = 13,
- Union = 14,
- FixedSizeBinary = 15,
- FixedSizeList = 16,
- Map = 17,
- Duration = 18,
- LargeBinary = 19,
- LargeUtf8 = 20,
- LargeList = 21,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Enums/UnionMode.cs b/csharp/src/Apache.Arrow/Flatbuf/Enums/UnionMode.cs
deleted file mode 100644
index 724ff4a..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Enums/UnionMode.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-internal enum UnionMode : short
-{
- Sparse = 0,
- Dense = 1,
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/Field.cs b/csharp/src/Apache.Arrow/Flatbuf/Field.cs
deleted file mode 100644
index a4f9e30..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/Field.cs
+++ /dev/null
@@ -1,83 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-/// ----------------------------------------------------------------------
-/// A field represents a named column in a record / row batch or child of a
-/// nested type.
-///
-/// - children is only for nested Arrow arrays
-/// - For primitive types, children will have length 0
-/// - nullable should default to true in general
-internal struct Field : IFlatbufferObject
-{
-  private Table __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public static Field GetRootAsField(ByteBuffer _bb) { return GetRootAsField(_bb, new Field()); }
-  public static Field GetRootAsField(ByteBuffer _bb, Field obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public Field __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  public string Name { get { int o = __p.__offset(4); return o != 0 ? __p.__string(o + __p.bb_pos) : null; } }
-#if ENABLE_SPAN_T
-  public Span<byte> GetNameBytes() { return __p.__vector_as_span(4); }
-#else
-  public ArraySegment<byte>? GetNameBytes() { return __p.__vector_as_arraysegment(4); }
-#endif
-  public byte[] GetNameArray() { return __p.__vector_as_array<byte>(4); }
-  public bool Nullable { get { int o = __p.__offset(6); return o != 0 ? 0!=__p.bb.Get(o + __p.bb_pos) : (bool)false; } }
-  public Type TypeType { get { int o = __p.__offset(8); return o != 0 ? (Type)__p.bb.Get(o + __p.bb_pos) : Flatbuf.Type.NONE; } }
-  public TTable? Type<TTable>() where TTable : struct, IFlatbufferObject { int o = __p.__offset(10); return o != 0 ? (TTable?)__p.__union<TTable>(o) : null; }
-  public DictionaryEncoding? Dictionary { get { int o = __p.__offset(12); return o != 0 ? (DictionaryEncoding?)(new DictionaryEncoding()).__assign(__p.__indirect(o + __p.bb_pos), __p.bb) : null; } }
-  public Field? Children(int j) { int o = __p.__offset(14); return o != 0 ? (Field?)(new Field()).__assign(__p.__indirect(__p.__vector(o) + j * 4), __p.bb) : null; }
-  public int ChildrenLength { get { int o = __p.__offset(14); return o != 0 ? __p.__vector_len(o) : 0; } }
-  public KeyValue? CustomMetadata(int j) { int o = __p.__offset(16); return o != 0 ? (KeyValue?)(new KeyValue()).__assign(__p.__indirect(__p.__vector(o) + j * 4), __p.bb) : null; }
-  public int CustomMetadataLength { get { int o = __p.__offset(16); return o != 0 ? __p.__vector_len(o) : 0; } }
-
-  public static Offset<Field> CreateField(FlatBufferBuilder builder,
-      StringOffset nameOffset = default(StringOffset),
-      bool nullable = false,
-      Type type_type = Flatbuf.Type.NONE,
-      int typeOffset = 0,
-      Offset<DictionaryEncoding> dictionaryOffset = default(Offset<DictionaryEncoding>),
-      VectorOffset childrenOffset = default(VectorOffset),
-      VectorOffset custom_metadataOffset = default(VectorOffset)) {
-    builder.StartObject(7);
-    Field.AddCustomMetadata(builder, custom_metadataOffset);
-    Field.AddChildren(builder, childrenOffset);
-    Field.AddDictionary(builder, dictionaryOffset);
-    Field.AddType(builder, typeOffset);
-    Field.AddName(builder, nameOffset);
-    Field.AddTypeType(builder, type_type);
-    Field.AddNullable(builder, nullable);
-    return Field.EndField(builder);
-  }
-
-  public static void StartField(FlatBufferBuilder builder) { builder.StartObject(7); }
-  public static void AddName(FlatBufferBuilder builder, StringOffset nameOffset) { builder.AddOffset(0, nameOffset.Value, 0); }
-  public static void AddNullable(FlatBufferBuilder builder, bool nullable) { builder.AddBool(1, nullable, false); }
-  public static void AddTypeType(FlatBufferBuilder builder, Type typeType) { builder.AddByte(2, (byte)typeType, 0); }
-  public static void AddType(FlatBufferBuilder builder, int typeOffset) { builder.AddOffset(3, typeOffset, 0); }
-  public static void AddDictionary(FlatBufferBuilder builder, Offset<DictionaryEncoding> dictionaryOffset) { builder.AddOffset(4, dictionaryOffset.Value, 0); }
-  public static void AddChildren(FlatBufferBuilder builder, VectorOffset childrenOffset) { builder.AddOffset(5, childrenOffset.Value, 0); }
-  public static VectorOffset CreateChildrenVector(FlatBufferBuilder builder, Offset<Field>[] data) { builder.StartVector(4, data.Length, 4); for (int i = data.Length - 1; i >= 0; i--) builder.AddOffset(data[i].Value); return builder.EndVector(); }
-  public static VectorOffset CreateChildrenVectorBlock(FlatBufferBuilder builder, Offset<Field>[] data) { builder.StartVector(4, data.Length, 4); builder.Add(data); return builder.EndVector(); }
-  public static void StartChildrenVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(4, numElems, 4); }
-  public static void AddCustomMetadata(FlatBufferBuilder builder, VectorOffset customMetadataOffset) { builder.AddOffset(6, customMetadataOffset.Value, 0); }
-  public static VectorOffset CreateCustomMetadataVector(FlatBufferBuilder builder, Offset<KeyValue>[] data) { builder.StartVector(4, data.Length, 4); for (int i = data.Length - 1; i >= 0; i--) builder.AddOffset(data[i].Value); return builder.EndVector(); }
-  public static VectorOffset CreateCustomMetadataVectorBlock(FlatBufferBuilder builder, Offset<KeyValue>[] data) { builder.StartVector(4, data.Length, 4); builder.Add(data); return builder.EndVector(); }
-  public static void StartCustomMetadataVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(4, numElems, 4); }
-  public static Offset<Field> EndField(FlatBufferBuilder builder) {
-    int o = builder.EndObject();
-    return new Offset<Field>(o);
-  }
-};
-
-
-}
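
As a usage illustration for the generated Field API above (a sketch: it
assumes the generated Int table from the same Flatbuf namespace, whose
CreateInt(builder, bitWidth, is_signed) helper is not shown in this diff):

    FlatBufferBuilder fb = new FlatBufferBuilder(128);
    StringOffset name = fb.CreateString("id");
    Offset<Int> int32 = Int.CreateInt(fb, 32, true); // assumed generated helper
    Offset<Field> field = Field.CreateField(
        fb,
        nameOffset: name,
        nullable: true,            // "nullable should default to true in general"
        type_type: Type.Int,       // union discriminant
        typeOffset: int32.Value);  // union value; no dictionary, children, or metadata
    fb.Finish(field.Value);
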
diff --git a/csharp/src/Apache.Arrow/Flatbuf/FieldNode.cs b/csharp/src/Apache.Arrow/Flatbuf/FieldNode.cs
deleted file mode 100644
index 811e10e..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/FieldNode.cs
+++ /dev/null
@@ -1,44 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-/// ----------------------------------------------------------------------
-/// Data structures for describing a table row batch (a collection of
-/// equal-length Arrow arrays)
-/// Metadata about a field at some level of a nested type tree (but not
-/// its children).
-///
-/// For example, a List<Int16> with values [[1, 2, 3], null, [4], [5, 6], null]
-/// would have {length: 5, null_count: 2} for its List node, and {length: 6,
-/// null_count: 0} for its Int16 node, as separate FieldNode structs
-internal struct FieldNode : IFlatbufferObject
-{
-  private Struct __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public FieldNode __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// The number of value slots in the Arrow array at this level of a nested
-  /// tree
-  public long Length { get { return __p.bb.GetLong(__p.bb_pos + 0); } }
-  /// The number of observed nulls. Fields with null_count == 0 may choose not
-  /// to write their physical validity bitmap out as a materialized buffer,
-  /// instead setting the length of the bitmap buffer to 0.
-  public long NullCount { get { return __p.bb.GetLong(__p.bb_pos + 8); } }
-
-  public static Offset<FieldNode> CreateFieldNode(FlatBufferBuilder builder, long Length, long NullCount) {
-    builder.Prep(8, 16);
-    builder.PutLong(NullCount);
-    builder.PutLong(Length);
-    return new Offset<FieldNode>(builder.Offset);
-  }
-};
-
-
-}
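
To make the doc comment concrete: for the List<Int16> example above with
values [[1, 2, 3], null, [4], [5, 6], null], two FieldNode structs are
emitted, outer node first. A sketch, where `b` is a hypothetical
FlatBufferBuilder (in the real IPC writer these structs land inside the
RecordBatch nodes vector):

    Offset<FieldNode> listNode  = FieldNode.CreateFieldNode(b, 5, 2); // length 5, 2 nulls
    Offset<FieldNode> int16Node = FieldNode.CreateFieldNode(b, 6, 0); // 6 values, 0 nulls
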
diff --git a/csharp/src/Apache.Arrow/Flatbuf/FixedSizeBinary.cs b/csharp/src/Apache.Arrow/Flatbuf/FixedSizeBinary.cs
deleted file mode 100644
index b6414a2..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/FixedSizeBinary.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-internal struct FixedSizeBinary : IFlatbufferObject
-{
-  private Table __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public static FixedSizeBinary GetRootAsFixedSizeBinary(ByteBuffer _bb) { return GetRootAsFixedSizeBinary(_bb, new FixedSizeBinary()); }
-  public static FixedSizeBinary GetRootAsFixedSizeBinary(ByteBuffer _bb, FixedSizeBinary obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public FixedSizeBinary __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// Number of bytes per value
-  public int ByteWidth { get { int o = __p.__offset(4); return o != 0 ? __p.bb.GetInt(o + __p.bb_pos) : (int)0; } }
-
-  public static Offset<FixedSizeBinary> CreateFixedSizeBinary(FlatBufferBuilder builder,
-      int byteWidth = 0) {
-    builder.StartObject(1);
-    FixedSizeBinary.AddByteWidth(builder, byteWidth);
-    return FixedSizeBinary.EndFixedSizeBinary(builder);
-  }
-
-  public static void StartFixedSizeBinary(FlatBufferBuilder builder) { builder.StartObject(1); }
-  public static void AddByteWidth(FlatBufferBuilder builder, int byteWidth) { builder.AddInt(0, byteWidth, 0); }
-  public static Offset<FixedSizeBinary> EndFixedSizeBinary(FlatBufferBuilder builder) {
-    int o = builder.EndObject();
-    return new Offset<FixedSizeBinary>(o);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/FixedSizeList.cs b/csharp/src/Apache.Arrow/Flatbuf/FixedSizeList.cs
deleted file mode 100644
index 0ca69b7..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/FixedSizeList.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-// <auto-generated>
-//  automatically generated by the FlatBuffers compiler, do not modify
-// </auto-generated>
-
-namespace Apache.Arrow.Flatbuf
-{
-
-using global::System;
-using global::FlatBuffers;
-
-internal struct FixedSizeList : IFlatbufferObject
-{
-  private Table __p;
-  public ByteBuffer ByteBuffer { get { return __p.bb; } }
-  public static FixedSizeList GetRootAsFixedSizeList(ByteBuffer _bb) { return GetRootAsFixedSizeList(_bb, new FixedSizeList()); }
-  public static FixedSizeList GetRootAsFixedSizeList(ByteBuffer _bb, FixedSizeList obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
-  public void __init(int _i, ByteBuffer _bb) { __p.bb_pos = _i; __p.bb = _bb; }
-  public FixedSizeList __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
-
-  /// Number of list items per value
-  public int ListSize { get { int o = __p.__offset(4); return o != 0 ? __p.bb.GetInt(o + __p.bb_pos) : (int)0; } }
-
-  public static Offset<FixedSizeList> CreateFixedSizeList(FlatBufferBuilder builder,
-      int listSize = 0) {
-    builder.StartObject(1);
-    FixedSizeList.AddListSize(builder, listSize);
-    return FixedSizeList.EndFixedSizeList(builder);
-  }
-
-  public static void StartFixedSizeList(FlatBufferBuilder builder) { builder.StartObject(1); }
-  public static void AddListSize(FlatBufferBuilder builder, int listSize) { builder.AddInt(0, listSize, 0); }
-  public static Offset<FixedSizeList> EndFixedSizeList(FlatBufferBuilder builder) {
-    int o = builder.EndObject();
-    return new Offset<FixedSizeList>(o);
-  }
-};
-
-
-}
diff --git a/csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/ByteBuffer.cs b/csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/ByteBuffer.cs
deleted file mode 100644
index 91cd5cc..0000000
--- a/csharp/src/Apache.Arrow/Flatbuf/FlatBuffers/ByteBuffer.cs
+++ /dev/null
@@ -1,891 +0,0 @@
-/*
- * Copyright 2014 Google Inc. All rights reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// There are 3 #defines that have an impact on performance / features of this ByteBuffer implementation
-//
-//      UNSAFE_BYTEBUFFER 
-//          This will use unsafe code to manipulate the underlying byte array. This
-//          can yield a reasonable performance increase.
-//
-//      BYTEBUFFER_NO_BOUNDS_CHECK
-//          This will disable the bounds check asserts to the byte array. This can
-//          yield a small performance gain in normal code.
-//
-//      ENABLE_SPAN_T
-//          This will enable reading and writing blocks of memory with a Span<T> instead of just
-//          T[].  You can also enable writing directly to shared memory or other types of memory
-//          by providing a custom implementation of ByteBufferAllocator.
-//          ENABLE_SPAN_T also requires UNSAFE_BYTEBUFFER to be defined
-//
-// Using UNSAFE_BYTEBUFFER and BYTEBUFFER_NO_BOUNDS_CHECK together can yield a
-// performance gain of ~15% for some operations; however, doing so is potentially
-// dangerous. Do so at your own risk!
-//
-
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-using System.Text;
-
-#if ENABLE_SPAN_T
-using System.Buffers.Binary;
-#endif
-
-#if ENABLE_SPAN_T && !UNSAFE_BYTEBUFFER
-#error ENABLE_SPAN_T requires UNSAFE_BYTEBUFFER to also be defined
-#endif
-
-namespace FlatBuffers
-{
-    internal abstract class ByteBufferAllocator
-    {
-#if ENABLE_SPAN_T
-        public abstract Span<byte> Span { get; }
-        public abstract ReadOnlySpan<byte> ReadOnlySpan { get; }
-        public abstract Memory<byte> Memory { get; }
-        public abstract ReadOnlyMemory<byte> ReadOnlyMemory { get; }
-
-#else
-        public byte[] Buffer
-        {
-            get;
-            protected set;
-        }
-#endif
-
-        public int Length
-        {
-            get;
-            protected set;
-        }
-
-        public abstract void GrowFront(int newSize);
-    }
-
-    internal sealed class ByteArrayAllocator : ByteBufferAllocator
-    {
-        private byte[] _buffer;
-
-        public ByteArrayAllocator(byte[] buffer)
-        {
-            _buffer = buffer;
-            InitBuffer();
-        }
-
-        public override void GrowFront(int newSize)
-        {
-            if ((Length & 0xC0000000) != 0)
-                throw new Exception(
-                    "ByteBuffer: cannot grow buffer beyond 2 gigabytes.");
-
-            if (newSize < Length)
-                throw new Exception("ByteBuffer: cannot truncate buffer.");
-
-            byte[] newBuffer = new byte[newSize];
-            System.Buffer.BlockCopy(_buffer, 0, newBuffer, newSize - Length, Length);
-            _buffer = newBuffer;
-            InitBuffer();
-        }
-
-#if ENABLE_SPAN_T
-        public override Span<byte> Span => _buffer;
-        public override ReadOnlySpan<byte> ReadOnlySpan => _buffer;
-        public override Memory<byte> Memory => _buffer;
-        public override ReadOnlyMemory<byte> ReadOnlyMemory => _buffer;
-#endif
-
-        private void InitBuffer()
-        {
-            Length = _buffer.Length;
-#if !ENABLE_SPAN_T
-            Buffer = _buffer;
-#endif
-        }
-    }
-
-    /// <summary>
-    /// Class to mimic Java's ByteBuffer, which is used heavily in Flatbuffers.
-    /// </summary>
-    internal class ByteBuffer
-    {
-        private ByteBufferAllocator _buffer;
-        private int _pos;  // Must track start of the buffer.
-
-        public ByteBuffer(ByteBufferAllocator allocator, int position)
-        {
-            _buffer = allocator;
-            _pos = position;
-        }
-
-        public ByteBuffer(int size) : this(new byte[size]) { }
-
-        public ByteBuffer(byte[] buffer) : this(buffer, 0) { }
-
-        public ByteBuffer(byte[] buffer, int pos)
-        {
-            _buffer = new ByteArrayAllocator(buffer);
-            _pos = pos;
-        }
-
-        public int Position
-        {
-            get { return _pos; }
-            set { _pos = value; }
-        }
-
-        public int Length { get { return _buffer.Length; } }
-
-        public void Reset()
-        {
-            _pos = 0;
-        }
-
-        // Create a new ByteBuffer on the same underlying data.
-        // The new ByteBuffer's position will be same as this buffer's.
-        public ByteBuffer Duplicate()
-        {
-            return new ByteBuffer(_buffer, Position);
-        }
-
-        // Increases the size of the ByteBuffer, and copies the old data towards
-        // the end of the new buffer.
-        public void GrowFront(int newSize)
-        {
-            _buffer.GrowFront(newSize);
-        }
-
-        public byte[] ToArray(int pos, int len)
-        {
-            return ToArray<byte>(pos, len);
-        }
-
-        /// <summary>
-        /// A lookup of type sizes. Used instead of Marshal.SizeOf(), which has additional
-        /// overhead; the lookup is also compatible with generic functions for simplified code.
-        /// </summary>
-        private static Dictionary<Type, int> genericSizes = new Dictionary<Type, int>()
-        {
-            { typeof(bool),     sizeof(bool) },
-            { typeof(float),    sizeof(float) },
-            { typeof(double),   sizeof(double) },
-            { typeof(sbyte),    sizeof(sbyte) },
-            { typeof(byte),     sizeof(byte) },
-            { typeof(short),    sizeof(short) },
-            { typeof(ushort),   sizeof(ushort) },
-            { typeof(int),      sizeof(int) },
-            { typeof(uint),     sizeof(uint) },
-            { typeof(ulong),    sizeof(ulong) },
-            { typeof(long),     sizeof(long) },
-        };
-
-        /// <summary>
-        /// Get the wire-size (in bytes) of a type supported by flatbuffers.
-        /// </summary>
-        /// <param name="t">The type to get the wire size of</param>
-        /// <returns>The wire size of the type, in bytes</returns>
-        public static int SizeOf<T>()
-        {
-            return genericSizes[typeof(T)];
-        }
-
-        /// <summary>
-        /// Checks if the Type provided is supported as scalar value
-        /// </summary>
-        /// <typeparam name="T">The Type to check</typeparam>
-        /// <returns>True if the type is a scalar type that is supported, false otherwise</returns>
-        public static bool IsSupportedType<T>()
-        {
-            return genericSizes.ContainsKey(typeof(T));
-        }
-
-        /// <summary>
-        /// Get the wire-size (in bytes) of a typed array
-        /// </summary>
-        /// <typeparam name="T">The type of the array</typeparam>
-        /// <param name="x">The array to get the size of</param>
-        /// <returns>The number of bytes the array takes on wire</returns>
-        public static int ArraySize<T>(T[] x)
-        {
-            return SizeOf<T>() * x.Length;
-        }
-
-#if ENABLE_SPAN_T
-        public static int ArraySize<T>(Span<T> x)
-        {
-            return SizeOf<T>() * x.Length;
-        }
-#endif
-
-        // Get a portion of the buffer casted into an array of type T, given
-        // the buffer position and length.
-#if ENABLE_SPAN_T
-        public T[] ToArray<T>(int pos, int len)
-            where T : struct
-        {
-            AssertOffsetAndLength(pos, len);
-            return MemoryMarshal.Cast<byte, T>(_buffer.ReadOnlySpan.Slice(pos)).Slice(0, len).ToArray();
-        }
-#else
-        public T[] ToArray<T>(int pos, int len)
-            where T : struct
-        {
-            AssertOffsetAndLength(pos, len);
-            T[] arr = new T[len];
-            Buffer.BlockCopy(_buffer.Buffer, pos, arr, 0, ArraySize(arr));
-            return arr;
-        }
-#endif
-
-        public byte[] ToSizedArray()
-        {
-            return ToArray<byte>(Position, Length - Position);
-        }
-
-        public byte[] ToFullArray()
-        {
-            return ToArray<byte>(0, Length);
-        }
-
-#if ENABLE_SPAN_T
-        public ReadOnlyMemory<byte> ToReadOnlyMemory(int pos, int len)
-        {
-            return _buffer.ReadOnlyMemory.Slice(pos, len);
-        }
-
-        public Memory<byte> ToMemory(int pos, int len)
-        {
-            return _buffer.Memory.Slice(pos, len);
-        }
-
-        public Span<byte> ToSpan(int pos, int len)
-        {
-            return _buffer.Span.Slice(pos, len);
-        }
-#else
-        public ArraySegment<byte> ToArraySegment(int pos, int len)
-        {
-            return new ArraySegment<byte>(_buffer.Buffer, pos, len);
-        }
-
... 15897 lines suppressed ...
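
One note on the ByteBuffer surface shown above: reads are index-based and
Duplicate() creates an independent cursor over the same bytes. An
illustrative sketch only, since these types are internal to the assembly
(GetInt is defined in the suppressed remainder of the file, and is relied
on by the generated GetRootAs* methods):

    ByteBuffer bb = new ByteBuffer(new byte[] { 1, 0, 0, 0 });
    int v = bb.GetInt(0);              // 1: FlatBuffers data is little-endian
    ByteBuffer view = bb.Duplicate();  // same backing bytes, independent Position
    view.Position = 0;                 // Reset() is equivalent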

[arrow-rs] 06/14: Removed js.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 70125b1ebb5e3526c76979c1088ab717bb563a47
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:20:31 2021 +0000

    Removed js.
---
 js/.eslintignore                                |     1 -
 js/.eslintrc.js                                 |    87 -
 js/.gitignore                                   |    91 -
 js/.npmrc                                       |     2 -
 js/.vscode/launch.json                          |   182 -
 js/DEVELOP.md                                   |   114 -
 js/README.md                                    |   280 -
 js/bin/arrow2csv.js                             |    28 -
 js/bin/file-to-stream.js                        |    40 -
 js/bin/integration.js                           |   255 -
 js/bin/json-to-arrow.js                         |   108 -
 js/bin/print-buffer-alignment.js                |    81 -
 js/bin/stream-to-file.js                        |    40 -
 js/examples/read_file.html                      |    91 -
 js/gulp/argv.js                                 |    39 -
 js/gulp/arrow-task.js                           |    64 -
 js/gulp/clean-task.js                           |    30 -
 js/gulp/closure-task.js                         |   215 -
 js/gulp/compile-task.js                         |    37 -
 js/gulp/memoize-task.js                         |    38 -
 js/gulp/minify-task.js                          |    90 -
 js/gulp/package-task.js                         |    98 -
 js/gulp/test-task.js                            |   176 -
 js/gulp/typescript-task.js                      |    69 -
 js/gulp/util.js                                 |   218 -
 js/gulpfile.js                                  |   102 -
 js/index.js                                     |    18 -
 js/index.mjs                                    |    18 -
 js/index.ts                                     |    18 -
 js/jest.config.js                               |    56 -
 js/jest.coverage.config.js                      |    30 -
 js/lerna.json                                   |    10 -
 js/npm-release.sh                               |    26 -
 js/package.json                                 |   111 -
 js/perf/config.js                               |    30 -
 js/perf/index.js                                |   248 -
 js/perf/table_config.js                         |    48 -
 js/src/Arrow.dom.ts                             |   112 -
 js/src/Arrow.node.ts                            |    32 -
 js/src/Arrow.ts                                 |   134 -
 js/src/bin/arrow2csv.ts                         |   334 -
 js/src/builder.ts                               |   527 --
 js/src/builder/binary.ts                        |    54 -
 js/src/builder/bool.ts                          |    31 -
 js/src/builder/buffer.ts                        |   182 -
 js/src/builder/date.ts                          |    26 -
 js/src/builder/decimal.ts                       |    22 -
 js/src/builder/dictionary.ts                    |    98 -
 js/src/builder/fixedsizebinary.ts               |    22 -
 js/src/builder/fixedsizelist.ts                 |    41 -
 js/src/builder/float.ts                         |    45 -
 js/src/builder/index.ts                         |    82 -
 js/src/builder/int.ts                           |    80 -
 js/src/builder/interval.ts                      |    26 -
 js/src/builder/list.ts                          |    58 -
 js/src/builder/map.ts                           |    64 -
 js/src/builder/null.ts                          |    29 -
 js/src/builder/run.ts                           |    34 -
 js/src/builder/struct.ts                        |    29 -
 js/src/builder/time.ts                          |    30 -
 js/src/builder/timestamp.ts                     |    30 -
 js/src/builder/union.ts                         |    96 -
 js/src/builder/utf8.ts                          |    44 -
 js/src/builder/valid.ts                         |    77 -
 js/src/column.ts                                |   136 -
 js/src/compute/dataframe.ts                     |   283 -
 js/src/compute/predicate.ts                     |   292 -
 js/src/data.ts                                  |   295 -
 js/src/enum.ts                                  |   142 -
 js/src/fb/.eslintrc.js                          |    23 -
 js/src/fb/File.ts                               |   300 -
 js/src/fb/Message.ts                            |   709 --
 js/src/fb/Schema.ts                             |  2658 ------
 js/src/interfaces.ts                            |   403 -
 js/src/io/adapters.ts                           |   398 -
 js/src/io/file.ts                               |   115 -
 js/src/io/interfaces.ts                         |   179 -
 js/src/io/node/builder.ts                       |    98 -
 js/src/io/node/iterable.ts                      |   113 -
 js/src/io/node/reader.ts                        |    86 -
 js/src/io/node/writer.ts                        |    77 -
 js/src/io/stream.ts                             |   152 -
 js/src/io/whatwg/builder.ts                     |   116 -
 js/src/io/whatwg/iterable.ts                    |    93 -
 js/src/io/whatwg/reader.ts                      |    52 -
 js/src/io/whatwg/writer.ts                      |    50 -
 js/src/ipc/message.ts                           |   257 -
 js/src/ipc/metadata/file.ts                     |   163 -
 js/src/ipc/metadata/json.ts                     |   206 -
 js/src/ipc/metadata/message.ts                  |   621 --
 js/src/ipc/reader.ts                            |   739 --
 js/src/ipc/writer.ts                            |   492 -
 js/src/recordbatch.ts                           |   151 -
 js/src/schema.ts                                |   154 -
 js/src/table.ts                                 |   294 -
 js/src/type.ts                                  |   613 --
 js/src/util/args.ts                             |   153 -
 js/src/util/bit.ts                              |   161 -
 js/src/util/bn.ts                               |   231 -
 js/src/util/buffer.ts                           |   235 -
 js/src/util/compat.ts                           |   178 -
 js/src/util/fn.ts                               |    31 -
 js/src/util/int.ts                              |   440 -
 js/src/util/math.ts                             |   105 -
 js/src/util/pretty.ts                           |    37 -
 js/src/util/recordbatch.ts                      |   121 -
 js/src/util/utf8.ts                             |    48 -
 js/src/util/vector.ts                           |   198 -
 js/src/vector.ts                                |    73 -
 js/src/vector/base.ts                           |   111 -
 js/src/vector/binary.ts                         |    27 -
 js/src/vector/bool.ts                           |    35 -
 js/src/vector/chunked.ts                        |   320 -
 js/src/vector/date.ts                           |    51 -
 js/src/vector/decimal.ts                        |    22 -
 js/src/vector/dictionary.ts                     |    60 -
 js/src/vector/fixedsizebinary.ts                |    22 -
 js/src/vector/fixedsizelist.ts                  |    22 -
 js/src/vector/float.ts                          |   144 -
 js/src/vector/index.ts                          |   207 -
 js/src/vector/int.ts                            |   195 -
 js/src/vector/interval.ts                       |    26 -
 js/src/vector/list.ts                           |    22 -
 js/src/vector/map.ts                            |    35 -
 js/src/vector/null.ts                           |    22 -
 js/src/vector/row.ts                            |   296 -
 js/src/vector/struct.ts                         |    32 -
 js/src/vector/time.ts                           |    30 -
 js/src/vector/timestamp.ts                      |    30 -
 js/src/vector/union.ts                          |    32 -
 js/src/vector/utf8.ts                           |    39 -
 js/src/visitor.ts                               |   260 -
 js/src/visitor/builderctor.ts                   |    98 -
 js/src/visitor/bytewidth.ts                     |    68 -
 js/src/visitor/get.ts                           |   321 -
 js/src/visitor/indexof.ts                       |   183 -
 js/src/visitor/iterator.ts                      |   193 -
 js/src/visitor/jsontypeassembler.ts             |    91 -
 js/src/visitor/jsonvectorassembler.ts           |   177 -
 js/src/visitor/set.ts                           |   354 -
 js/src/visitor/toarray.ts                       |   151 -
 js/src/visitor/typeassembler.ts                 |   158 -
 js/src/visitor/typecomparator.ts                |   280 -
 js/src/visitor/typector.ts                      |    82 -
 js/src/visitor/vectorassembler.ts               |   234 -
 js/src/visitor/vectorctor.ts                    |    99 -
 js/src/visitor/vectorloader.ts                  |   141 -
 js/test/.eslintrc.js                            |    31 -
 js/test/Arrow.ts                                |    62 -
 js/test/data/tables.ts                          |    84 -
 js/test/data/tables/generate.py                 |    50 -
 js/test/generate-test-data.ts                   |   723 --
 js/test/inference/column.ts                     |    62 -
 js/test/inference/nested.ts                     |    62 -
 js/test/inference/visitor/get.ts                |    56 -
 js/test/jest-extensions.ts                      |   162 -
 js/test/tsconfig.coverage.json                  |     6 -
 js/test/tsconfig.json                           |    18 -
 js/test/unit/bit-tests.ts                       |    41 -
 js/test/unit/builders/builder-tests.ts          |   269 -
 js/test/unit/builders/date-tests.ts             |   106 -
 js/test/unit/builders/dictionary-tests.ts       |    65 -
 js/test/unit/builders/int64-tests.ts            |    91 -
 js/test/unit/builders/primitive-tests.ts        |   154 -
 js/test/unit/builders/uint64-tests.ts           |    91 -
 js/test/unit/builders/utf8-tests.ts             |    62 -
 js/test/unit/builders/utils.ts                  |   219 -
 js/test/unit/generated-data-tests.ts            |    61 -
 js/test/unit/generated-data-validators.ts       |   184 -
 js/test/unit/int-tests.ts                       |   241 -
 js/test/unit/ipc/helpers.ts                     |   203 -
 js/test/unit/ipc/message-reader-tests.ts        |   109 -
 js/test/unit/ipc/reader/file-reader-tests.ts    |   123 -
 js/test/unit/ipc/reader/from-inference-tests.ts |   152 -
 js/test/unit/ipc/reader/json-reader-tests.ts    |    42 -
 js/test/unit/ipc/reader/stream-reader-tests.ts  |    65 -
 js/test/unit/ipc/reader/streams-dom-tests.ts    |   224 -
 js/test/unit/ipc/reader/streams-node-tests.ts   |   220 -
 js/test/unit/ipc/validate.ts                    |    74 -
 js/test/unit/ipc/writer/file-writer-tests.ts    |    46 -
 js/test/unit/ipc/writer/json-writer-tests.ts    |    48 -
 js/test/unit/ipc/writer/stream-writer-tests.ts  |   119 -
 js/test/unit/ipc/writer/streams-dom-tests.ts    |   273 -
 js/test/unit/ipc/writer/streams-node-tests.ts   |   276 -
 js/test/unit/math-tests.ts                      |    47 -
 js/test/unit/recordbatch/record-batch-tests.ts  |   130 -
 js/test/unit/table-tests.ts                     |   624 --
 js/test/unit/table/assign-tests.ts              |    80 -
 js/test/unit/table/serialize-tests.ts           |   167 -
 js/test/unit/utils.ts                           |    21 -
 js/test/unit/vector/bool-vector-tests.ts        |   111 -
 js/test/unit/vector/date-vector-tests.ts        |   102 -
 js/test/unit/vector/numeric-vector-tests.ts     |   608 --
 js/test/unit/vector/vector-tests.ts             |   127 -
 js/test/unit/visitor-tests.ts                   |   169 -
 js/tsconfig.json                                |    13 -
 js/tsconfig/tsconfig.base.json                  |    45 -
 js/tsconfig/tsconfig.bin.cjs.json               |    11 -
 js/tsconfig/tsconfig.es2015.cjs.json            |     8 -
 js/tsconfig/tsconfig.es2015.cls.json            |    11 -
 js/tsconfig/tsconfig.es2015.esm.json            |     8 -
 js/tsconfig/tsconfig.es5.cjs.json               |     8 -
 js/tsconfig/tsconfig.es5.cls.json               |    11 -
 js/tsconfig/tsconfig.es5.esm.json               |     8 -
 js/tsconfig/tsconfig.esnext.cjs.json            |     8 -
 js/tsconfig/tsconfig.esnext.cls.json            |    11 -
 js/tsconfig/tsconfig.esnext.esm.json            |     8 -
 js/typedoc.js                                   |    30 -
 js/yarn.lock                                    | 10958 ----------------------
 209 files changed, 40867 deletions(-)

diff --git a/js/.eslintignore b/js/.eslintignore
deleted file mode 100644
index a9ba028..0000000
--- a/js/.eslintignore
+++ /dev/null
@@ -1 +0,0 @@
-.eslintrc.js
diff --git a/js/.eslintrc.js b/js/.eslintrc.js
deleted file mode 100644
index 6d5020d..0000000
--- a/js/.eslintrc.js
+++ /dev/null
@@ -1,87 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-module.exports = {
-    env: {
-        browser: true,
-        es6: true,
-        node: true,
-    },
-    parser: "@typescript-eslint/parser",
-    parserOptions: {
-        project: "tsconfig.json",
-        sourceType: "module",
-        ecmaVersion: 2020,
-    },
-    plugins: ["@typescript-eslint", "jest"],
-    extends: [
-        "eslint:recommended",
-        "plugin:jest/recommended",
-        "plugin:jest/style",
-        "plugin:@typescript-eslint/recommended",
-    ],
-    rules: {
-        "@typescript-eslint/member-delimiter-style": [
-            "error",
-            {
-                multiline: {
-                    delimiter: "semi",
-                    requireLast: true,
-                },
-                singleline: {
-                    delimiter: "semi",
-                    requireLast: false,
-                },
-            },
-        ],
-        "@typescript-eslint/no-namespace": ["error", { "allowDeclarations": true }],
-        "@typescript-eslint/no-require-imports": "error",
-        "@typescript-eslint/no-var-requires": "off",  // handled by rule above
-        "@typescript-eslint/quotes": [
-            "error",
-            "single",
-            {
-                avoidEscape: true,
-                allowTemplateLiterals: true
-            },
-        ],
-        "@typescript-eslint/semi": ["error", "always"],
-        "@typescript-eslint/type-annotation-spacing": "error",
-        "@typescript-eslint/indent": "off",
-        "@typescript-eslint/no-empty-function": "off",
-        "@typescript-eslint/no-unused-expressions": "off",
-        "@typescript-eslint/no-use-before-define": "off",
-        "@typescript-eslint/explicit-module-boundary-types": "off",
-        "@typescript-eslint/no-explicit-any": "off",
-        "@typescript-eslint/no-misused-new": "off",
-        "@typescript-eslint/ban-ts-comment": "off",
-        "@typescript-eslint/no-non-null-assertion": "off",
-        "@typescript-eslint/no-unused-vars": "off",  // ts already takes care of this
-
-        "prefer-const": ["error", {
-            "destructuring": "all"
-        }],
-        "curly": ["error", "multi-line"],
-        "brace-style": ["error", "1tbs", { "allowSingleLine": true }],
-        "eol-last": "error",
-        "no-multiple-empty-lines": "error",
-        "no-trailing-spaces": "error",
-        "no-var": "error",
-        "no-empty": "off",
-        "no-cond-assign": "off"
-    },
-};
diff --git a/js/.gitignore b/js/.gitignore
deleted file mode 100644
index 9a11ab8..0000000
--- a/js/.gitignore
+++ /dev/null
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Logs
-logs
-*.log
-.esm-cache
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
-
-.vscode/**
-!.vscode/launch.json
-
-# Runtime data
-pids
-*.pid
-*.seed
-*.pid.lock
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-
-# Coverage directory used by tools like istanbul
-coverage
-
-# nyc test coverage
-.nyc_output
-
-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# Bower dependency directory (https://bower.io/)
-bower_components
-
-# node-waf configuration
-.lock-wscript
-
-# Compiled binary addons (http://nodejs.org/api/addons.html)
-build/Release
-
-# Dependency directories
-node_modules/
-jspm_packages/
-
-# Optional npm cache directory
-.npm
-
-# JS package manager files
-package-lock.json
-
-# Optional eslint cache
-.eslintcache
-
-# Optional REPL history
-.node_repl_history
-
-# Output of 'npm pack'
-*.tgz
-
-# Yarn Integrity file
-.yarn-integrity
-
-# dotenv environment variables file
-.env
-
-# compilation targets
-doc
-dist
-targets
-
-# test data files
-test/data/**/*.json
-test/data/**/*.arrow
-
-# jest snapshots (too big)
-test/__snapshots__/
diff --git a/js/.npmrc b/js/.npmrc
deleted file mode 100644
index 5536efc..0000000
--- a/js/.npmrc
+++ /dev/null
@@ -1,2 +0,0 @@
-save-prefix=
-engine-strict=true
diff --git a/js/.vscode/launch.json b/js/.vscode/launch.json
deleted file mode 100644
index 43851ba..0000000
--- a/js/.vscode/launch.json
+++ /dev/null
@@ -1,182 +0,0 @@
-{
-    // Use IntelliSense to learn about possible attributes.
-    // Hover to view descriptions of existing attributes.
-    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
-    "version": "0.2.0",
-    "configurations": [
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug Gulp Build",
-            "program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js",
-            "args": [
-                "build",
-                // Specify we want to debug the "src" target, which won't clean or build -- essentially a "dry-run" of the gulp build
-                "--target", "src"
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug Unit Tests",
-            "cwd": "${workspaceRoot}",
-            "program": "${workspaceFolder}/node_modules/.bin/jest",
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "env": {
-                "NODE_NO_WARNINGS": "1",
-                "READABLE_STREAM": "disable",
-                "TEST_DOM_STREAMS": "true",
-                "TEST_NODE_STREAMS": "true",
-                // Modify these environment variables to run tests on a specific compilation target + module format combo
-                "TEST_TS_SOURCE": "true",
-                // "TEST_TS_SOURCE": "false",
-                // "TEST_TARGET": "es5",
-                // "TEST_MODULE": "umd"
-            },
-            "args": [
-                "-i",
-                "test/unit/",
-                // "test/unit/builders/",
-
-                // Uncomment any of these to run individual test suites
-                // "test/unit/builders/builder-tests.ts",
-                // "test/unit/builders/int64-tests.ts",
-                // "test/unit/builders/uint64-tests.ts",
-                // "test/unit/builders/date-tests.ts",
-                // "test/unit/builders/primitive-tests.ts",
-                // "test/unit/builders/dictionary-tests.ts",
-                // "test/unit/builders/utf8-tests.ts",
-
-                // "test/unit/int-tests.ts",
-                // "test/unit/math-tests.ts",
-                // "test/unit/table-tests.ts",
-                // "test/unit/generated-data-tests.ts",
-
-                // "test/unit/table/assign-tests.ts",
-                // "test/unit/table/serialize-tests.ts",
-                // "test/unit/recordbatch/record-batch-tests.ts",
-
-                // "test/unit/vector/vector-tests.ts",
-                // "test/unit/vector/bool-vector-tests.ts",
-                // "test/unit/vector/date-vector-tests.ts",
-                // "test/unit/vector/numeric-vector-tests.ts",
-
-                // "test/unit/visitor-tests.ts",
-
-                // "test/unit/ipc/message-reader-tests.ts",
-                // "test/unit/ipc/reader/file-reader-tests.ts",
-                // "test/unit/ipc/reader/json-reader-tests.ts",
-                // "test/unit/ipc/reader/from-inference-tests.ts",
-                // "test/unit/ipc/reader/stream-reader-tests.ts",
-                // "test/unit/ipc/reader/streams-dom-tests.ts",
-                // "test/unit/ipc/reader/streams-node-tests.ts",
-                // "test/unit/ipc/writer/file-writer-tests.ts",
-                // "test/unit/ipc/writer/json-writer-tests.ts",
-                // "test/unit/ipc/writer/stream-writer-tests.ts",
-                // "test/unit/ipc/writer/streams-dom-tests.ts",
-                // "test/unit/ipc/writer/streams-node-tests.ts",
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug Integration Tests",
-            "cwd": "${workspaceRoot}",
-            "program": "${workspaceFolder}/bin/integration.js",
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "env": {
-                "NODE_NO_WARNINGS": "1",
-                "READABLE_STREAM": "disable"
-            },
-            "args": [
-                "--mode", "VALIDATE"
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug bin/arrow2csv",
-            "env": { "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" },
-            "runtimeArgs": ["-r", "ts-node/register"],
-            "console": "integratedTerminal",
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "args": [
-                "${workspaceFolder}/src/bin/arrow2csv.ts",
-                "-f", "./test/data/cpp/stream/simple.arrow"
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug bin/file-to-stream",
-            "env": { "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" },
-            "runtimeArgs": ["-r", "ts-node/register"],
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "args": [
-                "${workspaceFolder}/bin/file-to-stream.js",
-                "./test/data/cpp/file/struct_example.arrow",
-                "./struct_example-stream-out.arrow",
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug bin/stream-to-file",
-            "env": { "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" },
-            "runtimeArgs": ["-r", "ts-node/register"],
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "args": [
-                "${workspaceFolder}/bin/stream-to-file.js",
-                "./test/data/cpp/stream/struct_example.arrow",
-                "./struct_example-file-out.arrow",
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug bin/json-to-arrow",
-            "env": { "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" },
-            "runtimeArgs": ["-r", "ts-node/register"],
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "args": [
-                "${workspaceFolder}/bin/json-to-arrow.js",
-                "-j", "./test/data/json/struct_example.json",
-                "-a", "./struct_example-stream-out.arrow",
-                "-f", "stream"
-            ]
-        },
-        {
-            "type": "node",
-            "request": "launch",
-            "name": "Debug bin/print-buffer-alignment",
-            "env": { "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" },
-            "runtimeArgs": ["-r", "ts-node/register"],
-            "skipFiles": [
-                "<node_internals>/**/*.js",
-                "${workspaceFolder}/node_modules/**/*.js"
-            ],
-            "args": [
-                "${workspaceFolder}/bin/print-buffer-alignment.js",
-                "./test/data/cpp/stream/struct_example.arrow"
-            ]
-        }
-    ]
-}
diff --git a/js/DEVELOP.md b/js/DEVELOP.md
deleted file mode 100644
index 952a5f2..0000000
--- a/js/DEVELOP.md
+++ /dev/null
@@ -1,114 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Getting Involved
-
-Even if you do not plan to contribute to Apache Arrow itself or Arrow
-integrations in other projects, we'd be happy to have you involved:
-
-* Join the mailing list: send an email to [dev-subscribe@arrow.apache.org][1].
-  Share your ideas and use cases for the project.
-* [Follow our activity on JIRA][3]
-* [Learn the format][2]
-* Contribute code to one of the reference implementations
-
-We prefer to receive contributions in the form of GitHub pull requests.
-Please send pull requests against the [github.com/apache/arrow][4] repository.
-
-If you are looking for some ideas on what to contribute, check out the [JIRA
-issues][3] for the Apache Arrow project. Comment on the issue and/or contact
-[dev@arrow.apache.org](http://mail-archives.apache.org/mod_mbox/arrow-dev/)
-with your questions and ideas.
-
-If you’d like to report a bug but don’t have time to fix it, you can still post
-it on JIRA, or email the mailing list
-[dev@arrow.apache.org](http://mail-archives.apache.org/mod_mbox/arrow-dev/).
-
-# The package.json scripts
-
-We use [yarn](https://yarnpkg.com/) to install dependencies and run scripts.
-
-* `yarn clean` - cleans targets
-* `yarn build` - cleans and compiles all targets
-* `yarn test` - executes tests against built targets
-
-These scripts accept argument lists of targets × modules:
-
-* Available `targets` are `es5`, `es2015`, `esnext`, and `all` (default: `all`)
-* Available `modules` are `cjs`, `esm`, `umd`, and `all` (default: `all`)
-
-Examples:
-
-* `yarn build` -- builds all ES targets in all module formats
-* `yarn build -t es5 -m all` -- builds the ES5 target in all module formats
-* `yarn build -t all -m cjs` -- builds all ES targets in the CommonJS module format
-* `yarn build -t es5 -t es2015 -m all` -- builds the ES5 and ES2015 targets in all module formats
-* `yarn build -t es5 -m cjs -m esm` -- builds the ES5 target in CommonJS and ESModules module formats
-
-This argument configuration also applies to `clean` and `test` scripts.
-
-To run tests on the bundles, you need to build them first.
-To run tests directly on the sources without bundling, use the `src` target (e.g. `yarn test -t src`).
-
-* `yarn deploy`
-
-Uses [lerna](https://github.com/lerna/lerna) to publish each build target to npm with [conventional](https://conventionalcommits.org/) [changelogs](https://github.com/conventional-changelog/conventional-changelog/tree/master/packages/conventional-changelog-cli).
-
-# Updating the Arrow format flatbuffers generated code
-
-1. Generate the flatbuffers format code and adjust it for our build scripts (assumes `gnu-sed`):
-
-    ```shell
-    cd $ARROW_HOME
-
-    # Create a tmpdir to store modified flatbuffers schemas
-    tmp_format_dir=$(mktemp -d)
-    cp ./format/*.fbs $tmp_format_dir
-
-    # Remove namespaces from the flatbuffers schemas
-    sed -i '+s+namespace org.apache.arrow.flatbuf;++ig' $tmp_format_dir/*.fbs
-    sed -i '+s+org.apache.arrow.flatbuf.++ig' $tmp_format_dir/*.fbs
-
-    # Generate TS source from the modified Arrow flatbuffers schemas
-    flatc --ts --no-ts-reexport -o ./js/src/fb $tmp_format_dir/{File,Schema,Message}.fbs
-
-    # Remove the tmpdir
-    rm -rf $tmp_format_dir
-
-    cd ./js/src/fb
-
-    # Rename the existing files to <filename>.bak.ts
-    mv File{,.bak}.ts && mv Schema{,.bak}.ts && mv Message{,.bak}.ts
-
-    # Remove `_generated` from the ES6 imports of the generated files
-    sed -i '+s+_generated\";+\";+ig' *_generated.ts
-    # Fix all the `flatbuffers` imports
-    sed -i '+s+./flatbuffers+flatbuffers+ig' *_generated.ts
-    # Fix the Union createTypeIdsVector typings
-    sed -i -r '+s+static createTypeIdsVector\(builder: flatbuffers.Builder, data: number\[\] \| Uint8Array+static createTypeIdsVector\(builder: flatbuffers.Builder, data: number\[\] \| Int32Array+ig' Schema_generated.ts
-    # Remove "_generated" suffix from TS files
-    mv File{_generated,}.ts && mv Schema{_generated,}.ts && mv Message{_generated,}.ts
-    ```
-
-2. Execute `yarn lint` from the `js` directory to fix the linting errors
-
-[1]: mailto:dev-subscribe@arrow.apache.org
-[2]: https://github.com/apache/arrow/tree/master/format
-[3]: https://issues.apache.org/jira/browse/ARROW
-[4]: https://github.com/apache/arrow
diff --git a/js/README.md b/js/README.md
deleted file mode 100644
index 586ecea..0000000
--- a/js/README.md
+++ /dev/null
@@ -1,280 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# [Apache Arrow](https://github.com/apache/arrow) in JS
-
-[![npm version](https://img.shields.io/npm/v/apache-arrow.svg)](https://www.npmjs.com/package/apache-arrow)
-[![Build Status](https://travis-ci.org/apache/arrow.svg?branch=master)](https://travis-ci.org/apache/arrow)
-
-Arrow is a set of technologies that enable big data systems to process and transfer data quickly.
-
-## Install `apache-arrow` from NPM
-
-`npm install apache-arrow` or `yarn add apache-arrow`
-
-(read about how we [package apache-arrow](#packaging) below)
-
-# Powering Columnar In-Memory Analytics
-
-[Apache Arrow](https://github.com/apache/arrow) is a columnar memory layout specification for encoding vectors and table-like containers of flat and nested data. The Arrow spec aligns columnar data in memory to minimize cache misses and take advantage of the latest SIMD (Single Instruction, Multiple Data) and GPU operations on modern processors.
-
-Apache Arrow is the emerging standard for large in-memory columnar data ([Spark](https://spark.apache.org/), [Pandas](https://wesmckinney.com/blog/pandas-and-apache-arrow/), [Drill](https://drill.apache.org/), [Graphistry](https://www.graphistry.com), ...). By standardizing on a common binary interchange format, big data systems can reduce the costs and friction associated with cross-system communication.
-
-# Get Started
-
-Check out our [API documentation][7] to learn more about how to use Apache Arrow's JS implementation. You can also learn by example from the following resources:
-
-* [Observable: Introduction to Apache Arrow][5]
-* [Observable: Manipulating flat arrays arrow-style][6]
-* [Observable: Rich columnar data tables - Dictionary-encoded strings, 64bit ints, and nested structs][8]
-* [/js/test/unit](https://github.com/apache/arrow/tree/master/js/test/unit) - Unit tests for Table and Vector
-
-## Cookbook
-
-### Get a table from an Arrow file on disk (in IPC format)
-
-```js
-import { readFileSync } from 'fs';
-import { Table } from 'apache-arrow';
-
-const arrow = readFileSync('simple.arrow');
-const table = Table.from([arrow]);
-
-console.log(table.toString());
-
-/*
- foo,  bar,  baz
-   1,    1,   aa
-null, null, null
-   3, null, null
-   4,    4,  bbb
-   5,    5, cccc
-*/
-```
-
-### Create a Table when the Arrow file is split across buffers
-
-```js
-import { readFileSync } from 'fs';
-import { Table } from 'apache-arrow';
-
-const table = Table.from([
-    'latlong/schema.arrow',
-    'latlong/records.arrow'
-].map((file) => readFileSync(file)));
-
-console.log(table.toString());
-
-/*
-        origin_lat,         origin_lon
-35.393089294433594,  -97.6007308959961
-35.393089294433594,  -97.6007308959961
-35.393089294433594,  -97.6007308959961
-29.533695220947266, -98.46977996826172
-29.533695220947266, -98.46977996826172
-*/
-```
-
-### Create a Table from JavaScript arrays
-
-```js
-import {
-  Table,
-  FloatVector,
-  DateVector
-} from 'apache-arrow';
-
-const LENGTH = 2000;
-
-const rainAmounts = Float32Array.from(
-  { length: LENGTH },
-  () => Number((Math.random() * 20).toFixed(1)));
-
-const rainDates = Array.from(
-  { length: LENGTH },
-  (_, i) => new Date(Date.now() - 1000 * 60 * 60 * 24 * i));
-
-const rainfall = Table.new(
-  [FloatVector.from(rainAmounts), DateVector.from(rainDates)],
-  ['precipitation', 'date']
-);
-```
-
-### Load data with `fetch`
-
-```js
-import { Table } from "apache-arrow";
-
-const table = await Table.from(fetch("/simple.arrow"));
-console.log(table.toString());
-```
-
-### Columns look like JS Arrays
-
-```js
-import { strict as assert } from 'assert';
-import { readFileSync } from 'fs';
-import { Table } from 'apache-arrow';
-
-const table = Table.from([
-    'latlong/schema.arrow',
-    'latlong/records.arrow'
-].map(readFileSync));
-
-const column = table.getColumn('origin_lat');
-
-// Copy the data into a TypedArray
-const typed = column.toArray();
-assert(typed instanceof Float32Array);
-
-for (let i = -1, n = column.length; ++i < n;) {
-    assert(column.get(i) === typed[i]);
-}
-```
-
-### Usage with MapD Core
-
-```js
-import MapD from 'rxjs-mapd';
-import { Table } from 'apache-arrow';
-
-const port = 9091;
-const host = `localhost`;
-const db = `mapd`;
-const user = `mapd`;
-const password = `HyperInteractive`;
-
-MapD.open(host, port)
-  .connect(db, user, password)
-  .flatMap((session) =>
-    // queryDF returns Arrow buffers
-    session.queryDF(`
-      SELECT origin_city
-      FROM flights
-      WHERE dest_city ILIKE 'dallas'
-      LIMIT 5`
-    ).disconnect()
-  )
-  .map(([schema, records]) =>
-    // Create Arrow Table from results
-    Table.from([schema, records]))
-  .map((table) =>
-    // Stringify the table to CSV with row numbers
-    table.toString({ index: true }))
-  .subscribe((csvStr) =>
-    console.log(csvStr));
-/*
-Index,   origin_city
-    0, Oklahoma City
-    1, Oklahoma City
-    2, Oklahoma City
-    3,   San Antonio
-    4,   San Antonio
-*/
-```
-
-# Getting involved
-
-See [DEVELOP.md](DEVELOP.md)
-
-Even if you do not plan to contribute to Apache Arrow itself or Arrow
-integrations in other projects, we'd be happy to have you involved:
-
-* Join the mailing list: send an email to
-  [dev-subscribe@arrow.apache.org][1]. Share your ideas and use cases for the
-  project.
-* [Follow our activity on JIRA][3]
-* [Learn the format][2]
-* Contribute code to one of the reference implementations
-
-We prefer to receive contributions in the form of GitHub pull requests. Please send pull requests against the [github.com/apache/arrow][4] repository.
-
-If you are looking for some ideas on what to contribute, check out the [JIRA
-issues][3] for the Apache Arrow project. Comment on the issue and/or contact
-[dev@arrow.apache.org](https://mail-archives.apache.org/mod_mbox/arrow-dev/)
-with your questions and ideas.
-
-If you’d like to report a bug but don’t have time to fix it, you can still post
-it on JIRA, or email the mailing list
-[dev@arrow.apache.org](https://mail-archives.apache.org/mod_mbox/arrow-dev/).
-
-## Packaging
-
-`apache-arrow` is written in TypeScript, but the project is compiled to multiple JS versions and common module formats.
-
-The base `apache-arrow` package includes all the compilation targets for convenience, but if you're conscious of your `node_modules` footprint, we've got you covered.
-
-The targets are also published under the `@apache-arrow` namespace:
-
-```sh
-npm install apache-arrow # <-- combined es2015/UMD + esnext/CommonJS/ESModules/UMD
-npm install @apache-arrow/ts # standalone TypeScript package
-npm install @apache-arrow/es5-cjs # standalone es5/CommonJS package
-npm install @apache-arrow/es5-esm # standalone es5/ESModules package
-npm install @apache-arrow/es5-umd # standalone es5/UMD package
-npm install @apache-arrow/es2015-cjs # standalone es2015/CommonJS package
-npm install @apache-arrow/es2015-esm # standalone es2015/ESModules package
-npm install @apache-arrow/es2015-umd # standalone es2015/UMD package
-npm install @apache-arrow/esnext-cjs # standalone esNext/CommonJS package
-npm install @apache-arrow/esnext-esm # standalone esNext/ESModules package
-npm install @apache-arrow/esnext-umd # standalone esNext/UMD package
-```
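-
-Each standalone target exposes the same API as the combined package. A minimal sketch of consuming one directly (assuming `@apache-arrow/es5-cjs` was installed as above):
-
-```js
-// Use the es5/CommonJS target in place of the combined `apache-arrow` package
-const { Table, FloatVector } = require('@apache-arrow/es5-cjs');
-
-const table = Table.new([FloatVector.from(new Float32Array([1, 2, 3]))], ['x']);
-console.log(table.length); // 3
-```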
-
-### Why we package like this
-
-The JS community is a diverse group with a varied list of target environments and toolchains. Publishing multiple packages accommodates projects of all stripes.
-
-If you think we missed a compilation target and it's a blocker for adoption, please open an issue.
-
-# People
-
-Full list of broader Apache Arrow [committers](https://arrow.apache.org/committers/).
-
-* Brian Hulette,  _committer_
-* Paul Taylor, Graphistry, Inc.,  _committer_
-
-# Powered By Apache Arrow in JS
-
-Full list of broader Apache Arrow [projects & organizations](https://arrow.apache.org/powered_by/).
-
-## Open Source Projects
-
-* [Apache Arrow](https://arrow.apache.org) -- Parent project for Powering Columnar In-Memory Analytics, including affiliated open source projects
-* [rxjs-mapd](https://github.com/graphistry/rxjs-mapd) -- A MapD Core node-driver that returns query results as Arrow columns
-* [Perspective](https://github.com/jpmorganchase/perspective) -- Perspective is a streaming data visualization engine for JavaScript by J.P. Morgan, for building real-time & user-configurable analytics entirely in the browser.
-* [Falcon](https://github.com/uwdata/falcon) is a visualization tool for linked interactions across multiple aggregate visualizations of millions or billions of records.
-
-## Companies & Organizations
-
-* [CCRi](https://www.ccri.com/) -- Commonwealth Computer Research Inc, or CCRi, is a Central Virginia based data science and software engineering company
-* [GOAI](https://gpuopenanalytics.com/) -- GPU Open Analytics Initiative standardizes on Arrow as part of creating common data frameworks that enable developers and statistical researchers to accelerate data science on GPUs
-* [Graphistry, Inc.](https://www.graphistry.com/) - An end-to-end GPU accelerated visual investigation platform used by teams for security, anti-fraud, and related investigations. Graphistry uses Arrow in its NodeJS GPU backend and client libraries, and is an early contributing member to GOAI and Arrow\[JS\] working to bring these technologies to the enterprise.
-
-# License
-
-[Apache 2.0](https://github.com/apache/arrow/blob/master/LICENSE)
-
-[1]: mailto:dev-subscribe@arrow.apache.org
-[2]: https://github.com/apache/arrow/tree/master/format
-[3]: https://issues.apache.org/jira/browse/ARROW
-[4]: https://github.com/apache/arrow
-[5]: https://beta.observablehq.com/@theneuralbit/introduction-to-apache-arrow
-[6]: https://beta.observablehq.com/@lmeyerov/manipulating-flat-arrays-arrow-style
-[7]: https://arrow.apache.org/docs/js/
-[8]: https://observablehq.com/@lmeyerov/rich-data-types-in-apache-arrow-js-efficient-data-tables-wit
diff --git a/js/bin/arrow2csv.js b/js/bin/arrow2csv.js
deleted file mode 100755
index 0e446fa..0000000
--- a/js/bin/arrow2csv.js
+++ /dev/null
@@ -1,28 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const Path = require(`path`);
-const here = Path.resolve(__dirname, '../');
-const tsnode = require.resolve(`ts-node/register`);
-const arrow2csv = Path.join(here, `src/bin/arrow2csv.ts`);
-const env = { ...process.env, TS_NODE_TRANSPILE_ONLY: `true` };
-
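-// Re-run the TypeScript CLI (src/bin/arrow2csv.ts) in a child node
-// process with ts-node registered, forwarding any command-line args.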
-require('child_process').spawn(`node`, [
-    `-r`, tsnode, arrow2csv, ...process.argv.slice(2)
-], { cwd: here, env, stdio: `inherit` });
diff --git a/js/bin/file-to-stream.js b/js/bin/file-to-stream.js
deleted file mode 100755
index 090cd0b..0000000
--- a/js/bin/file-to-stream.js
+++ /dev/null
@@ -1,40 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// @ts-check
-
-const fs = require('fs');
-const path = require('path');
-const eos = require('util').promisify(require('stream').finished);
-const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '';
-const { RecordBatchReader, RecordBatchStreamWriter } = require(`../index${extension}`);
-
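-// Read an Arrow IPC file (random-access format) from argv[2] or stdin,
-// and write it back out in the streaming IPC format to argv[3] or stdout.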
-(async () => {
-
-    const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2]));
-    const writable = process.argv.length < 4 ? process.stdout : fs.createWriteStream(path.resolve(process.argv[3]));
-
-    const fileToStream = readable
-        .pipe(RecordBatchReader.throughNode())
-        .pipe(RecordBatchStreamWriter.throughNode())
-        .pipe(writable);
-
-    await eos(fileToStream);
-
-})().catch((e) => { console.error(e); process.exit(1); });
diff --git a/js/bin/integration.js b/js/bin/integration.js
deleted file mode 100755
index 2e5f16b..0000000
--- a/js/bin/integration.js
+++ /dev/null
@@ -1,255 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// @ts-nocheck
-
-const fs = require('fs');
-const Path = require('path');
-const { promisify } = require('util');
-const glob = promisify(require('glob'));
-const { zip } = require('ix/iterable/zip');
-const { parse: bignumJSONParse } = require('json-bignum');
-const argv = require(`command-line-args`)(cliOpts(), { partial: true });
-const {
-    Table,
-    RecordBatchReader,
-    util: { createElementComparator }
-} = require('../targets/apache-arrow/');
-
-const exists = async (p) => {
-    try {
-        return !!(await fs.promises.stat(p));
-    } catch (e) { return false; }
-}
-
-(async () => {
-
-    if (!argv.mode) { return print_usage(); }
-
-    let mode = argv.mode.toUpperCase();
-    let jsonPaths = [...(argv.json || [])];
-    let arrowPaths = [...(argv.arrow || [])];
-
-    if (mode === 'VALIDATE' && !jsonPaths.length) {
-        [jsonPaths, arrowPaths] = await loadLocalJSONAndArrowPathsForDebugging(jsonPaths, arrowPaths);
-    }
-
-    if (!jsonPaths.length) { return print_usage(); }
-
-    switch (mode) {
-        case 'VALIDATE':
-            for (let [jsonPath, arrowPath] of zip(jsonPaths, arrowPaths)) {
-                await validate(jsonPath, arrowPath);
-            }
-            break;
-        default:
-            return print_usage();
-    }
-})()
-.then((x) => +x || 0, (e) => {
-    e && process.stderr.write(`${e && e.stack || e}\n`);
-    return process.exitCode || 1;
-}).then((code) => process.exit(code));
-
-function cliOpts() {
-    return [
-        {
-            type: String,
-            name: 'mode',
-            description: 'The integration test to run'
-        },
-        {
-            type: String,
-            name: 'arrow', alias: 'a',
-            multiple: true, defaultValue: [],
-            description: 'The Arrow file[s] to read/write'
-        },
-        {
-            type: String,
-            name: 'json', alias: 'j',
-            multiple: true, defaultValue: [],
-            description: 'The JSON file[s] to read/write'
-        }
-    ];
-}
-
-function print_usage() {
-    console.log(require('command-line-usage')([
-        {
-            header: 'integration',
-            content: 'Script for running Arrow integration tests'
-        },
-        {
-            header: 'Synopsis',
-            content: [
-                '$ integration.js -j file.json -a file.arrow --mode validate'
-            ]
-        },
-        {
-            header: 'Options',
-            optionList: [
-                ...cliOpts(),
-                {
-                    name: 'help',
-                    description: 'Print this usage guide.'
-                }
-            ]
-        },
-    ]));
-    return 1;
-}
-
-async function validate(jsonPath, arrowPath) {
-
-    const files = await Promise.all([
-        fs.promises.readFile(arrowPath),
-        fs.promises.readFile(jsonPath, 'utf8'),
-    ]);
-
-    const arrowData = files[0];
-    const jsonData = bignumJSONParse(files[1]);
-
-    validateReaderIntegration(jsonData, arrowData);
-    validateTableFromBuffersIntegration(jsonData, arrowData);
-    validateTableToBuffersIntegration('json', 'file')(jsonData, arrowData);
-    validateTableToBuffersIntegration('binary', 'file')(jsonData, arrowData);
-    validateTableToBuffersIntegration('json', 'stream')(jsonData, arrowData);
-    validateTableToBuffersIntegration('binary', 'stream')(jsonData, arrowData);
-}
-
-function validateReaderIntegration(jsonData, arrowBuffer) {
-    const msg = `json and arrow record batches report the same values`;
-    try {
-        const jsonReader = RecordBatchReader.from(jsonData);
-        const binaryReader = RecordBatchReader.from(arrowBuffer);
-        for (const [jsonRecordBatch, binaryRecordBatch] of zip(jsonReader, binaryReader)) {
-            compareTableIsh(jsonRecordBatch, binaryRecordBatch);
-        }
-    } catch (e) { throw new Error(`${msg}: fail \n ${e && e.stack || e}`); }
-    process.stdout.write(`${msg}: pass\n`);
-}
-
-function validateTableFromBuffersIntegration(jsonData, arrowBuffer) {
-    const msg = `json and arrow tables report the same values`;
-    try {
-        const jsonTable = Table.from(jsonData);
-        const binaryTable = Table.from(arrowBuffer);
-        compareTableIsh(jsonTable, binaryTable);
-    } catch (e) { throw new Error(`${msg}: fail \n ${e && e.stack || e}`); }
-    process.stdout.write(`${msg}: pass\n`);
-}
-
-function validateTableToBuffersIntegration(srcFormat, arrowFormat) {
-    const refFormat = srcFormat === `json` ? `binary` : `json`;
-    return function testTableToBuffersIntegration(jsonData, arrowBuffer) {
-        const msg = `serialized ${srcFormat} ${arrowFormat} reports the same values as the ${refFormat} ${arrowFormat}`;
-        try {
-            const refTable = Table.from(refFormat === `json` ? jsonData : arrowBuffer);
-            const srcTable = Table.from(srcFormat === `json` ? jsonData : arrowBuffer);
-            const dstTable = Table.from(srcTable.serialize(`binary`, arrowFormat === `stream`));
-            compareTableIsh(dstTable, refTable);
-        } catch (e) { throw new Error(`${msg}: fail \n ${e && e.stack || e}`); }
-        process.stdout.write(`${msg}: pass\n`);
-    };
-}
-
-function compareTableIsh(actual, expected) {
-    if (actual.length !== expected.length) {
-        throw new Error(`length: ${actual.length} !== ${expected.length}`);
-    }
-    if (actual.numCols !== expected.numCols) {
-        throw new Error(`numCols: ${actual.numCols} !== ${expected.numCols}`);
-    }
-    (() => {
-        const getChildAtFn = expected instanceof Table ? 'getColumnAt' : 'getChildAt';
-        for (let i = -1, n = actual.numCols; ++i < n;) {
-            const v1 = actual[getChildAtFn](i);
-            const v2 = expected[getChildAtFn](i);
-            compareVectors(v1, v2);
-        }
-    })();
-}
-
-function compareVectors(actual, expected) {
-
-    if ((actual == null && expected != null) || (expected == null && actual != null)) {
-        throw new Error(`${actual == null ? `actual` : `expected`} is null, was expecting ${actual == null ? expected : actual} to be that also`);
-    }
-
-    let props = ['type', 'length', 'nullCount'];
-
-    (() => {
-        for (let i = -1, n = props.length; ++i < n;) {
-            const prop = props[i];
-            if (`${actual[prop]}` !== `${expected[prop]}`) {
-                throw new Error(`${prop}: ${actual[prop]} !== ${expected[prop]}`);
-            }
-        }
-    })();
-
-    (() => {
-        for (let i = -1, n = actual.length; ++i < n;) {
-            let x1 = actual.get(i), x2 = expected.get(i);
-            if (!createElementComparator(x2)(x1)) {
-                throw new Error(`${i}: ${x1} !== ${x2}`);
-            }
-        }
-    })();
-
-    (() => {
-        let i = -1;
-        for (let [x1, x2] of zip(actual, expected)) {
-            ++i;
-            if (!createElementComparator(x2)(x1)) {
-                throw new Error(`${i}: ${x1} !== ${x2}`);
-            }
-        }
-    })();
-}
-
-async function loadLocalJSONAndArrowPathsForDebugging(jsonPaths, arrowPaths) {
-
-    const sourceJSONPaths = await glob(Path.resolve(__dirname, `../test/data/json/`, `*.json`));
-
-    if (!arrowPaths.length) {
-        await loadJSONAndArrowPaths(sourceJSONPaths, jsonPaths, arrowPaths, 'cpp', 'file');
-        await loadJSONAndArrowPaths(sourceJSONPaths, jsonPaths, arrowPaths, 'java', 'file');
-        await loadJSONAndArrowPaths(sourceJSONPaths, jsonPaths, arrowPaths, 'cpp', 'stream');
-        await loadJSONAndArrowPaths(sourceJSONPaths, jsonPaths, arrowPaths, 'java', 'stream');
-    }
-
-    for (let [jsonPath, arrowPath] of zip(jsonPaths, arrowPaths)) {
-        console.log(`jsonPath: ${jsonPath}`);
-        console.log(`arrowPath: ${arrowPath}`);
-    }
-
-    return [jsonPaths, arrowPaths];
-
-    async function loadJSONAndArrowPaths(sourceJSONPaths, jsonPaths, arrowPaths, source, format) {
-        for (const jsonPath of sourceJSONPaths) {
-            const { name } = Path.parse(jsonPath);
-            const arrowPath = Path.resolve(__dirname, `../test/data/${source}/${format}/${name}.arrow`);
-            if (await exists(arrowPath)) {
-                jsonPaths.push(jsonPath);
-                arrowPaths.push(arrowPath);
-            }
-        }
-        return [jsonPaths, arrowPaths];
-    }
-}
diff --git a/js/bin/json-to-arrow.js b/js/bin/json-to-arrow.js
deleted file mode 100755
index 8f3fbd3..0000000
--- a/js/bin/json-to-arrow.js
+++ /dev/null
@@ -1,108 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// @ts-check
-
-const fs = require('fs');
-const Path = require('path');
-const { parse } = require('json-bignum');
-const eos = require('util').promisify(require('stream').finished);
-const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '';
-const argv = require(`command-line-args`)(cliOpts(), { partial: true });
-const { RecordBatchReader, RecordBatchFileWriter, RecordBatchStreamWriter } = require(`../index${extension}`);
-
-const jsonPaths = [...(argv.json || [])];
-const arrowPaths = [...(argv.arrow || [])];
-
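-// Convert each JSON integration-test file to a binary Arrow file (or
-// stream, per --format), writing to the matching --arrow output path.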
-(async () => {
-
-    if (!jsonPaths.length || !arrowPaths.length || (jsonPaths.length !== arrowPaths.length)) {
-        return print_usage();
-    }
-
-    await Promise.all(jsonPaths.map(async (path, i) => {
-
-        const RecordBatchWriter = argv.format !== 'stream'
-            ? RecordBatchFileWriter
-            : RecordBatchStreamWriter;
-
-        const reader = RecordBatchReader.from(parse(
-            await fs.promises.readFile(Path.resolve(path), 'utf8')));
-
-        const jsonToArrow = reader
-            .pipe(RecordBatchWriter.throughNode())
-            .pipe(fs.createWriteStream(arrowPaths[i]));
-
-        await eos(jsonToArrow);
-
-    }));
-})()
-.then((x) => +x || 0, (e) => {
-    e && process.stderr.write(`${e}`);
-    return process.exitCode || 1;
-}).then((code = 0) => process.exit(code));
-
-function cliOpts() {
-    return [
-        {
-            type: String,
-            name: 'format', alias: 'f',
-            multiple: false, defaultValue: 'file',
-            description: 'The Arrow format to write, either "file" or "stream"'
-        },
-        {
-            type: String,
-            name: 'arrow', alias: 'a',
-            multiple: true, defaultValue: [],
-            description: 'The Arrow file[s] to write'
-        },
-        {
-            type: String,
-            name: 'json', alias: 'j',
-            multiple: true, defaultValue: [],
-            description: 'The JSON file[s] to read'
-        }
-    ];
-}
-
-function print_usage() {
-    console.log(require('command-line-usage')([
-        {
-            header: 'json-to-arrow',
-            content: 'Script for converting a JSON Arrow file to a binary Arrow file'
-        },
-        {
-            header: 'Synopsis',
-            content: [
-                '$ json-to-arrow.js -j in.json -a out.arrow -f stream'
-            ]
-        },
-        {
-            header: 'Options',
-            optionList: [
-                ...cliOpts(),
-                {
-                    name: 'help',
-                    description: 'Print this usage guide.'
-                }
-            ]
-        },
-    ]));
-    return 1;
-}
diff --git a/js/bin/print-buffer-alignment.js b/js/bin/print-buffer-alignment.js
deleted file mode 100755
index 4c32603..0000000
--- a/js/bin/print-buffer-alignment.js
+++ /dev/null
@@ -1,81 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// @ts-check
-
-const fs = require('fs');
-const path = require('path');
-const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '';
-const { RecordBatch, AsyncMessageReader } = require(`../index${extension}`);
-const { VectorLoader } = require(`../targets/apache-arrow/visitor/vectorloader`);
-
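-// Walk the IPC messages in an Arrow file or stream and print the byte
-// offset and length of every record/dictionary batch body and each of
-// its buffer regions, so buffer alignment can be inspected.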
-(async () => {
-
-    const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2]));
-    const reader = new AsyncMessageReader(readable);
-
-    let schema, recordBatchIndex = 0, dictionaryBatchIndex = 0;
-
-    for await (let message of reader) {
-
-        let bufferRegions = [];
-
-        if (message.isSchema()) {
-            schema = message.header();
-            continue;
-        } else if (message.isRecordBatch()) {
-            const header = message.header();
-            bufferRegions = header.buffers;
-            const body = await reader.readMessageBody(message.bodyLength);
-            const recordBatch = loadRecordBatch(schema, header, body);
-            console.log(`record batch ${++recordBatchIndex}: ${JSON.stringify({
-                offset: body.byteOffset,
-                length: body.byteLength,
-                numRows: recordBatch.length,
-            })}`);
-        } else if (message.isDictionaryBatch()) {
-            const header = message.header();
-            bufferRegions = header.data.buffers;
-            const type = schema.dictionaries.get(header.id);
-            const body = await reader.readMessageBody(message.bodyLength);
-            const recordBatch = loadDictionaryBatch(header.data, body, type);
-            console.log(`dictionary batch ${++dictionaryBatchIndex}: ${JSON.stringify({
-                offset: body.byteOffset,
-                length: body.byteLength,
-                numRows: recordBatch.length,
-                dictionaryId: header.id,
-            })}`);
-        }
-
-        bufferRegions.forEach(({ offset, length }, i) => {
-            console.log(`\tbuffer ${i + 1}: { offset: ${offset},  length: ${length} }`);
-        });
-    }
-
-    await reader.return();
-
-})().catch((e) => { console.error(e); process.exit(1); });
-
-function loadRecordBatch(schema, header, body) {
-    return new RecordBatch(schema, header.length, new VectorLoader(body, header.nodes, header.buffers, new Map()).visitMany(schema.fields));
-}
-
-function loadDictionaryBatch(header, body, dictionaryType) {
-    return RecordBatch.new(new VectorLoader(body, header.nodes, header.buffers, new Map()).visitMany([dictionaryType]));
-}
diff --git a/js/bin/stream-to-file.js b/js/bin/stream-to-file.js
deleted file mode 100755
index 015a5ea..0000000
--- a/js/bin/stream-to-file.js
+++ /dev/null
@@ -1,40 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// @ts-check
-
-const fs = require('fs');
-const path = require('path');
-const eos = require('util').promisify(require('stream').finished);
-const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '';
-const { RecordBatchReader, RecordBatchFileWriter } = require(`../index${extension}`);
-
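-// Read an Arrow IPC stream from argv[2] or stdin, and write it back out
-// in the random-access file format to argv[3] or stdout.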
-(async () => {
-
-    const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2]));
-    const writable = process.argv.length < 4 ? process.stdout : fs.createWriteStream(path.resolve(process.argv[3]));
-
-    const streamToFile = readable
-        .pipe(RecordBatchReader.throughNode())
-        .pipe(RecordBatchFileWriter.throughNode())
-        .pipe(writable);
-
-    await eos(streamToFile);
-
-})().catch((e) => { console.error(e); process.exit(1); });
diff --git a/js/examples/read_file.html b/js/examples/read_file.html
deleted file mode 100644
index 1013fbe..0000000
--- a/js/examples/read_file.html
+++ /dev/null
@@ -1,91 +0,0 @@
-<!DOCTYPE html>
-
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-<html>
-  <head>
-    <title>Arrow.js browser test</title>
-    <meta charset="utf-8">
-    <style>
-table {
-  border-collapse: collapse;
-}
-table, th, td {
-  border: 1px solid black;
-  white-space: nowrap;
-}
-    </style>
-    <script type="text/javascript">
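-// Read the selected .arrow file in the browser with a FileReader and
-// render it as an HTML table (Arrow comes from the UMD bundle below).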
-var reader = new FileReader();
-function addCell (tr, type, value) {
-  var td = document.createElement(type)
-  td.textContent = value;
-  tr.appendChild(td);
-}
-reader.onload = function (evt) {
-
-  var arrowTable = Arrow.Table.from([new Uint8Array(evt.target.result)]);
-  var thead = document.getElementById("thead");
-  var tbody = document.getElementById("tbody");
-
-  while (thead.hasChildNodes()) {
-      thead.removeChild(thead.lastChild);
-  }
-
-  while (tbody.hasChildNodes()) {
-      tbody.removeChild(tbody.lastChild);
-  }
-
-  var header_row = document.createElement("tr");
-  for (let field of arrowTable.schema.fields) {
-    addCell(header_row, "th", `${field}`);
-  }
-
-  thead.appendChild(header_row);
-
-  for (let row of arrowTable) {
-    var tr = document.createElement("tr");
-    for (let cell of row) {
-      addCell(tr, "td",
-        cell == null ? 'null'
-        : !Array.isArray(cell) ? cell
-        : '[' + cell.map((value) => value == null ? 'null' : value).join(', ') + ']'
-      );
-    }
-    tbody.appendChild(tr);
-  }
-}
-
-function handleFiles(files) {
-  reader.readAsArrayBuffer(files[0]);
-}
-    </script>
-  </head>
-  <body>
-    <input id="arrow-in" type="file" onchange="handleFiles(this.files)" />
-    <table>
-      <thead id="thead">
-      </thead>
-      <tbody id="tbody">
-      </tbody>
-    </table>
-    <script type="text/javascript" src="../targets/es2015/umd/Arrow.js"></script>
-  </body>
-</html>
diff --git a/js/gulp/argv.js b/js/gulp/argv.js
deleted file mode 100644
index 0acdad7..0000000
--- a/js/gulp/argv.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const argv = require(`command-line-args`)([
-    { name: `all`, type: Boolean },
-    { name: 'verbose', alias: `v`, type: Boolean },
-    { name: `target`, type: String, defaultValue: `` },
-    { name: `module`, type: String, defaultValue: `` },
-    { name: `coverage`, type: Boolean, defaultValue: false },
-    { name: `targets`, alias: `t`, type: String, multiple: true, defaultValue: [] },
-    { name: `modules`, alias: `m`, type: String, multiple: true, defaultValue: [] },
-], { partial: true });
-
-const { targets, modules } = argv;
-
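-// Fold the singular --target/--module flags into the plural -t/-m lists
-// when those are empty; both default to `all`, except for `src` builds,
-// which have no module formats.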
-if (argv.target === `src`) {
-    argv.target && !targets.length && targets.push(argv.target);
-} else {
-    argv.target && !targets.length && targets.push(argv.target);
-    argv.module && !modules.length && modules.push(argv.module);
-    (argv.all || !targets.length) && targets.push(`all`);
-    (argv.all || !modules.length) && modules.push(`all`);
-}
-
-module.exports = { argv, targets, modules };
diff --git a/js/gulp/arrow-task.js b/js/gulp/arrow-task.js
deleted file mode 100644
index 93e9475..0000000
--- a/js/gulp/arrow-task.js
+++ /dev/null
@@ -1,64 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const {
-    targetDir, observableFromStreams
-} = require('./util');
-
-const del = require('del');
-const gulp = require('gulp');
-const mkdirp = require('mkdirp');
-const gulpRename = require(`gulp-rename`);
-const { memoizeTask } = require('./memoize-task');
-const { Observable, ReplaySubject } = require('rxjs');
-const pipeline = require('util').promisify(require('stream').pipeline);
-
-const arrowTask = ((cache) => memoizeTask(cache, function copyMain(target) {
-    const out = targetDir(target);
-    const dtsGlob = `${targetDir(`esnext`, `cjs`)}/**/*.ts`;
-    const cjsGlob = `${targetDir(`esnext`, `cjs`)}/**/*.js`;
-    const esmGlob = `${targetDir(`esnext`, `esm`)}/**/*.js`;
-    const es2015UmdGlob = `${targetDir(`es2015`, `umd`)}/*.js`;
-    const esnextUmdGlob = `${targetDir(`esnext`, `umd`)}/*.js`;
-    const cjsSourceMapsGlob = `${targetDir(`esnext`, `cjs`)}/**/*.map`;
-    const esmSourceMapsGlob = `${targetDir(`esnext`, `esm`)}/**/*.map`;
-    const es2015UmdSourceMapsGlob = `${targetDir(`es2015`, `umd`)}/*.map`;
-    const esnextUmdSourceMapsGlob = `${targetDir(`esnext`, `umd`)}/*.map`;
-    return Observable.forkJoin(
-        observableFromStreams(gulp.src(dtsGlob),                 gulp.dest(out)), // copy d.ts files
-        observableFromStreams(gulp.src(cjsGlob),                 gulp.dest(out)), // copy esnext cjs files
-        observableFromStreams(gulp.src(cjsSourceMapsGlob),       gulp.dest(out)), // copy esnext cjs sourcemaps
-        observableFromStreams(gulp.src(esmSourceMapsGlob),       gulp.dest(out)), // copy esnext esm sourcemaps
-        observableFromStreams(gulp.src(es2015UmdSourceMapsGlob), gulp.dest(out)), // copy es2015 umd sourcemap files, but don't rename
-        observableFromStreams(gulp.src(esnextUmdSourceMapsGlob), gulp.dest(out)), // copy esnext umd sourcemap files, but don't rename
-        observableFromStreams(gulp.src(esmGlob),       gulpRename((p) => { p.extname = '.mjs'; }),          gulp.dest(out)), // copy esnext esm files and rename to `.mjs`
-        observableFromStreams(gulp.src(es2015UmdGlob), gulpRename((p) => { p.basename += `.es2015.min`; }), gulp.dest(out)), // copy es2015 umd files and add `.min`
-        observableFromStreams(gulp.src(esnextUmdGlob), gulpRename((p) => { p.basename += `.esnext.min`; }), gulp.dest(out)), // copy esnext umd files and add `.esnext.min`
-    ).publish(new ReplaySubject()).refCount();
-}))({});
-
-const arrowTSTask = ((cache) => memoizeTask(cache, async function copyTS(target, format) {
-    const out = targetDir(target, format);
-    await mkdirp(out);
-    await pipeline(gulp.src(`src/**/*`), gulp.dest(out));
-    await del(`${out}/**/*.js`);
-}))({});
-
-module.exports = arrowTask;
-module.exports.arrowTask = arrowTask;
-module.exports.arrowTSTask = arrowTSTask;
diff --git a/js/gulp/clean-task.js b/js/gulp/clean-task.js
deleted file mode 100644
index 551aeb4..0000000
--- a/js/gulp/clean-task.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const del = require('del');
-const { Observable } = require('rxjs');
-const { targetDir } = require('./util');
-const memoizeTask = require('./memoize-task');
-
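-// Delete the build output directory for a target/module pair; any
-// errors (e.g. the directory not existing) are swallowed.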
-const cleanTask = ((cache) => memoizeTask(cache, function clean(target, format) {
-    const dir = targetDir(target, format);
-    return Observable.from(del(dir))
-        .catch((e) => Observable.empty());
-}))({});
-
-module.exports = cleanTask;
-module.exports.cleanTask = cleanTask;
diff --git a/js/gulp/closure-task.js b/js/gulp/closure-task.js
deleted file mode 100644
index d0ecb12..0000000
--- a/js/gulp/closure-task.js
+++ /dev/null
@@ -1,215 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const {
-    targetDir,
-    mainExport,
-    esmRequire,
-    gCCLanguageNames,
-    publicModulePaths,
-    observableFromStreams,
-    shouldRunInChildProcess,
-    spawnGulpCommandInChildProcess,
-} = require('./util');
-
-const fs = require('fs');
-const gulp = require('gulp');
-const path = require('path');
-const mkdirp = require('mkdirp');
-const sourcemaps = require('gulp-sourcemaps');
-const { memoizeTask } = require('./memoize-task');
-const { compileBinFiles } = require('./typescript-task');
-const closureCompiler = require('google-closure-compiler').gulp();
-
-const closureTask = ((cache) => memoizeTask(cache, async function closure(target, format) {
-
-    if (shouldRunInChildProcess(target, format)) {
-        return spawnGulpCommandInChildProcess('compile', target, format);
-    }
-
-    const src = targetDir(target, `cls`);
-    const srcAbsolute = path.resolve(src);
-    const out = targetDir(target, format);
-    const externs = path.join(`${out}/${mainExport}.externs.js`);
-    const entry_point = path.join(`${src}/${mainExport}.dom.cls.js`);
-
-    const exportedImports = publicModulePaths(srcAbsolute).reduce((entries, publicModulePath) => [
-        ...entries, {
-            publicModulePath,
-            exports_: getPublicExportedNames(esmRequire(publicModulePath, { warnings: false }))
-        }
-    ], []);
-
-    await mkdirp(out);
-
-    await Promise.all([
-        fs.promises.writeFile(externs, generateExternsFile(exportedImports)),
-        fs.promises.writeFile(entry_point, generateUMDExportAssignment(srcAbsolute, exportedImports))
-    ]);
-
-    return await Promise.all([
-        runClosureCompileAsObservable().toPromise(),
-        compileBinFiles(target, format).toPromise()
-    ]);
-
-    function runClosureCompileAsObservable() {
-        return observableFromStreams(
-            gulp.src([
-                /* external libs first */
-                `node_modules/flatbuffers/package.json`,
-                `node_modules/flatbuffers/js/flatbuffers.mjs`,
-                `node_modules/text-encoding-utf-8/package.json`,
-                `node_modules/text-encoding-utf-8/src/encoding.js`,
-                `${src}/**/*.js` /* <-- then source globs */
-            ], { base: `./` }),
-            sourcemaps.init(),
-            closureCompiler(createClosureArgs(entry_point, externs), {
-                platform: ['native', 'java', 'javascript']
-            }),
-            // rename the sourcemaps from *.js.map files to *.min.js.map
-            sourcemaps.write(`.`, { mapFile: (mapPath) => mapPath.replace(`.js.map`, `.${target}.min.js.map`) }),
-            gulp.dest(out)
-        );
-    }
-}))({});
-
-module.exports = closureTask;
-module.exports.closureTask = closureTask;
-
-const createClosureArgs = (entry_point, externs) => ({
-    externs,
-    entry_point,
-    third_party: true,
-    warning_level: `QUIET`,
-    dependency_mode: `PRUNE`,
-    rewrite_polyfills: false,
-    module_resolution: `NODE`,
-    // formatting: `PRETTY_PRINT`,
-    // debug: true,
-    compilation_level: `ADVANCED`,
-    package_json_entry_names: `module,jsnext:main,main`,
-    assume_function_wrapper: true,
-    js_output_file: `${mainExport}.js`,
-    language_in: gCCLanguageNames[`esnext`],
-    language_out: gCCLanguageNames[`es5`],
-    output_wrapper:`${apacheHeader()}
-(function (global, factory) {
-    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
-    typeof define === 'function' && define.amd ? define(['exports'], factory) :
-    (factory(global.Arrow = global.Arrow || {}));
-}(this, (function (exports) {%output%}.bind(this))));`
-});
-
-function generateUMDExportAssignment(src, exportedImports) {
-    return [
-        ...exportedImports.map(({ publicModulePath }, i) => {
-            const p = publicModulePath.slice(src.length + 1);
-            return (`import * as exports${i} from './${p}';`);
-        }).filter(Boolean),
-        'Object.assign(arguments[0], exports0);'
-    ].join('\n');
-}
-
-function generateExternsFile(exportedImports) {
-    return [
-        externsHeader(),
-        ...exportedImports.reduce((externBodies, { exports_ }) => [
-            ...externBodies, ...exports_.map(externBody)
-        ], []).filter(Boolean)
-    ].join('\n');
-}
-
-function externBody({ exportName, staticNames, instanceNames }) {
-    return [
-        `var ${exportName} = function() {};`,
-        staticNames.map((staticName) => (isNaN(+staticName)
-            ? `/** @type {?} */\n${exportName}.${staticName} = function() {};`
-            : `/** @type {?} */\n${exportName}[${staticName}] = function() {};`
-        )).join('\n'),
-        instanceNames.map((instanceName) => (isNaN(+instanceName)
-            ? `/** @type {?} */\n${exportName}.prototype.${instanceName};`
-            : `/** @type {?} */\n${exportName}.prototype[${instanceName}];`
-        )).join('\n')
-    ].filter(Boolean).join('\n');
-}
-
-function externsHeader() {
-    return (`${apacheHeader()}
-// @ts-nocheck
-/* eslint-disable */
-/**
- * @fileoverview Closure Compiler externs for Arrow
- * @externs
- * @suppress {duplicate,checkTypes}
- */
-/** @type {symbol} */
-Symbol.iterator;
-/** @type {symbol} */
-Symbol.toPrimitive;
-/** @type {symbol} */
-Symbol.asyncIterator;
-`);
-}
-
-function getPublicExportedNames(entryModule) {
-    const fn = function() {};
-    const isStaticOrProtoName = (x) => (
-        !(x in fn) &&
-        (x !== `default`) &&
-        (x !== `undefined`) &&
-        (x !== `__esModule`) &&
-        (x !== `constructor`) &&
-        !(x.startsWith('_'))
-    );
-    return Object
-        .getOwnPropertyNames(entryModule)
-        .filter((name) => name !== 'default')
-        .filter((name) => (
-            typeof entryModule[name] === `object` ||
-            typeof entryModule[name] === `function`
-        ))
-        .map((name) => [name, entryModule[name]])
-        .reduce((reserved, [name, value]) => {
-
-            const staticNames = value &&
-                typeof value === 'object' ? Object.getOwnPropertyNames(value).filter(isStaticOrProtoName) :
-                typeof value === 'function' ? Object.getOwnPropertyNames(value).filter(isStaticOrProtoName) : [];
-
-            const instanceNames = (typeof value === `function` && Object.getOwnPropertyNames(value.prototype || {}) || []).filter(isStaticOrProtoName);
-
-            return [...reserved, { exportName: name, staticNames, instanceNames }];
-        }, []);
-}
-
-function apacheHeader() {
-    return `// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.`;
-}
diff --git a/js/gulp/compile-task.js b/js/gulp/compile-task.js
deleted file mode 100644
index 60e2ebb..0000000
--- a/js/gulp/compile-task.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const { Observable } = require('rxjs');
-const { npmPkgName } = require('./util');
-const { memoizeTask } = require('./memoize-task');
-
-const minifyTask = require('./minify-task');
-const closureTask = require('./closure-task');
-const typescriptTask = require('./typescript-task');
-const { arrowTask, arrowTSTask } = require('./arrow-task');
-
-const compileTask = ((cache) => memoizeTask(cache, function compile(target, format, ...args) {
-    return target === `src`                    ? Observable.empty()
-         : target === npmPkgName               ? arrowTask(target, format, ...args)()
-         : target === `ts`                     ? arrowTSTask(target, format, ...args)()
-         : format === `umd` ? target === `es5` ? closureTask(target, format, ...args)()
-                                               : minifyTask(target, format, ...args)()
-                                               : typescriptTask(target, format, ...args)();
-}))({});
-
-module.exports = compileTask;
-module.exports.compileTask = compileTask;
diff --git a/js/gulp/memoize-task.js b/js/gulp/memoize-task.js
deleted file mode 100644
index 408ee3b..0000000
--- a/js/gulp/memoize-task.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const { taskName } = require('./util');
-
-const createTask = ((taskFn) => ((target, format, ...args) => {
-  // Give the wrapped fn a displayName so gulp's output is easier to follow.
-  const fn = () => taskFn(target, format, ...args);
-  fn.displayName = `${taskFn.name || ``}:${taskName(target, format, ...args)}:task`;
-  return fn;
-}));
-
-const memoizeTask = ((cache, taskFn) => ((target, format, ...args) => {
-    // Give the memoized fn a displayName so gulp's output is easier to follow.
-    const fn = () => (
-      cache[taskName(target, format)] || (
-      cache[taskName(target, format)] = taskFn(target, format, ...args)));
-    fn.displayName = `${taskFn.name || ``}:${taskName(target, format, ...args)}:task`;
-    return fn;
-}));
-
-module.exports = memoizeTask;
-module.exports.createTask = createTask;
-module.exports.memoizeTask = memoizeTask;
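
For context on the helper above: memoizeTask keys its cache on taskName(target, format), so a second request for the same target/format pair reuses the cached task instead of building a new one. A minimal usage sketch, assuming this module is on disk (the build body here is hypothetical, standing in for a real gulp task):

    const { memoizeTask } = require('./memoize-task');

    const cache = {};
    const buildTask = memoizeTask(cache, function build(target, format) {
        // hypothetical task body -- stands in for a real gulp task
        return Promise.resolve(`built ${target}:${format}`);
    });

    const run = buildTask('es5', 'umd'); // run.displayName === 'build:es5:umd:task'
    run();                     // invokes build() and caches the result under 'es5:umd'
    buildTask('es5', 'umd')(); // returns the cached result; build() does not rerun
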
diff --git a/js/gulp/minify-task.js b/js/gulp/minify-task.js
deleted file mode 100644
index 81cb5e5..0000000
--- a/js/gulp/minify-task.js
+++ /dev/null
@@ -1,90 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const {
-    targetDir,
-    mainExport,
-    UMDSourceTargets,
-    terserLanguageNames,
-    shouldRunInChildProcess,
-    spawnGulpCommandInChildProcess,
-} = require('./util');
-
-const path = require('path');
-const webpack = require(`webpack`);
-const { memoizeTask } = require('./memoize-task');
-const { compileBinFiles } = require('./typescript-task');
-const { Observable, ReplaySubject } = require('rxjs');
-const TerserPlugin = require(`terser-webpack-plugin`);
-
-const minifyTask = ((cache, commonConfig) => memoizeTask(cache, function minifyJS(target, format) {
-
-    if (shouldRunInChildProcess(target, format)) {
-        return spawnGulpCommandInChildProcess('compile', target, format);
-    }
-
-    const sourceTarget = UMDSourceTargets[target];
-    const out = targetDir(target, format), src = targetDir(sourceTarget, `cls`);
-
-    const targetConfig = { ...commonConfig,
-        output: { ...commonConfig.output,
-            path: path.resolve(`./${out}`) } };
-
-    const webpackConfigs = [mainExport].map((entry) => ({
-        ...targetConfig,
-        name: entry,
-        entry: { [entry]: path.resolve(`${src}/${entry}.dom.js`) },
-        plugins: [
-            ...(targetConfig.plugins || []),
-            new webpack.SourceMapDevToolPlugin({
-                filename: `[name].${target}.min.js.map`,
-                moduleFilenameTemplate: ({ resourcePath }) =>
-                    resourcePath
-                        .replace(/\s/, `_`)
-                        .replace(/\.\/node_modules\//, ``)
-            })
-        ],
-        optimization: {
-            minimize: true,
-            minimizer: [
-                new TerserPlugin({
-                    sourceMap: true,
-                    terserOptions: {
-                        ecma: terserLanguageNames[target],
-                        output: { comments: false, beautify: false },
-                        compress: { unsafe: true },
-                        mangle: true,
-                        safari10: true // <-- works around safari10 bugs, see the "safari10" option here: https://github.com/terser-js/terser#minify-options
-                    },
-                })
-            ]
-        }
-    }));
-
-    const compilers = webpack(webpackConfigs);
-    return Observable
-            .bindNodeCallback(compilers.run.bind(compilers))()
-            .merge(compileBinFiles(target, format)).takeLast(1)
-            .multicast(new ReplaySubject()).refCount();
-}))({}, {
-    resolve: { mainFields: [`module`, `main`] },
-    module: { rules: [{ test: /\.js$/, enforce: `pre`, use: [`source-map-loader`] }] },
-    output: { filename: '[name].js', library: mainExport, libraryTarget: `umd`, umdNamedDefine: true },
-});
-
-module.exports = minifyTask;
-module.exports.minifyTask = minifyTask;
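
Concretely, for target es5 the task above resolves UMDSourceTargets['es5'] to 'es5', reads the closure-stage output from targets/es5/cls, and writes the minified UMD bundle to targets/es5/umd. A comment-only trace of the paths involved, following targetDir's layout:

    // minifyJS('es5', 'umd'), roughly:
    //   src    = targets/es5/cls        (entry: targets/es5/cls/Arrow.dom.js)
    //   out    = targets/es5/umd        (bundle: targets/es5/umd/Arrow.js, UMD library "Arrow")
    //   srcmap = Arrow.es5.min.js.map   (via SourceMapDevToolPlugin)
    //   terser runs with ecma = terserLanguageNames['es5'] = 5
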
diff --git a/js/gulp/package-task.js b/js/gulp/package-task.js
deleted file mode 100644
index cb1d97c..0000000
--- a/js/gulp/package-task.js
+++ /dev/null
@@ -1,98 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const {
-    metadataFiles, packageJSONFields,
-    mainExport, npmPkgName, npmOrgName,
-    targetDir, packageName, observableFromStreams
-} = require('./util');
-
-const gulp = require('gulp');
-const { memoizeTask } = require('./memoize-task');
-const { Observable, ReplaySubject } = require('rxjs');
-const gulpJsonTransform = require('gulp-json-transform');
-
-const packageTask = ((cache) => memoizeTask(cache, function bundle(target, format) {
-    if (target === `src`) return Observable.empty();
-    const out = targetDir(target, format);
-    const jsonTransform = gulpJsonTransform(target === npmPkgName ? createMainPackageJson(target, format) :
-                                            target === `ts`       ? createTypeScriptPackageJson(target, format)
-                                                                  : createScopedPackageJSON(target, format),
-                                            2);
-    return Observable.forkJoin(
-      observableFromStreams(gulp.src(metadataFiles), gulp.dest(out)), // copy metadata files
-      observableFromStreams(gulp.src(`package.json`), jsonTransform, gulp.dest(out)) // write packageJSONs
-    ).publish(new ReplaySubject()).refCount();
-}))({});
-
-module.exports = packageTask;
-module.exports.packageTask = packageTask;
-
-const createMainPackageJson = (target, format) => (orig) => ({
-    ...createTypeScriptPackageJson(target, format)(orig),
-    bin: orig.bin,
-    name: npmPkgName,
-    main: `${mainExport}.node`,
-    browser: `${mainExport}.dom`,
-    module: `${mainExport}.dom.mjs`,
-    types: `${mainExport}.node.d.ts`,
-    unpkg: `${mainExport}.es2015.min.js`,
-    jsdelivr: `${mainExport}.es2015.min.js`,
-    sideEffects: false,
-    esm: { mode: `all`, sourceMap: true },
-});
-
-const createTypeScriptPackageJson = (target, format) => (orig) => ({
-    ...createScopedPackageJSON(target, format)(orig),
-    bin: undefined,
-    module: undefined,
-    main: `${mainExport}.node.ts`,
-    types: `${mainExport}.node.ts`,
-    browser: `${mainExport}.dom.ts`,
-    dependencies: {
-        '@types/flatbuffers': '*',
-        '@types/node': '*',
-        ...orig.dependencies
-    }
-});
-
-const createScopedPackageJSON = (target, format) => (({ name, ...orig }) =>
-    packageJSONFields.reduce(
-        (xs, key) => ({ ...xs, [key]: xs[key] || orig[key] }),
-        {
-            // un-set version, since it's automatically applied during the release process
-            version: undefined,
-            // set the scoped package name (e.g. "@apache-arrow/esnext-esm")
-            name: `${npmOrgName}/${packageName(target, format)}`,
-            // set "unpkg"/"jsdeliver" if building scoped UMD target
-            unpkg:    format === 'umd' ? `${mainExport}.js` : undefined,
-            jsdelivr: format === 'umd' ? `${mainExport}.js` : undefined,
-            // set "browser" if building scoped UMD target, otherwise "Arrow.dom"
-            browser:  format === 'umd' ? `${mainExport}.js` : `${mainExport}.dom.js`,
-            // set "main" to "Arrow" if building scoped UMD target, otherwise "Arrow.node"
-            main:     format === 'umd' ? `${mainExport}.js` : `${mainExport}.node`,
-            // set "module" (for https://www.npmjs.com/package/@pika/pack) if building scoped ESM target
-            module:   format === 'esm' ? `${mainExport}.dom.js` : undefined,
-            // set "sideEffects" to false as a hint to Webpack that it's safe to tree-shake the ESM target
-            sideEffects: format === 'esm' ? false : undefined,
-            // include "esm" settings for https://www.npmjs.com/package/esm if building scoped ESM target
-            esm:      format === `esm` ? { mode: `auto`, sourceMap: true } : undefined,
-            // set "types" (for TypeScript/VSCode)
-            types:    format === 'umd' ? undefined : `${mainExport}.node.d.ts`,
-        }
-    )
-);
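
As an illustration of createScopedPackageJSON above, a umd build of the es5 target would produce roughly the manifest below; license and description are copied through packageJSONFields from the root package.json later in this diff, and the other listed fields (author, homepage, dependencies, and so on) are carried over the same way:

    {
      "name": "@apache-arrow/es5-umd",
      "unpkg": "Arrow.js",
      "jsdelivr": "Arrow.js",
      "browser": "Arrow.js",
      "main": "Arrow.js",
      "license": "Apache-2.0",
      "description": "Apache Arrow columnar in-memory format"
    }
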
diff --git a/js/gulp/test-task.js b/js/gulp/test-task.js
deleted file mode 100644
index 8c1eab1..0000000
--- a/js/gulp/test-task.js
+++ /dev/null
@@ -1,176 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const del = require('del');
-const path = require('path');
-const mkdirp = require('mkdirp');
-const cpy = require('cpy');
-const { argv } = require('./argv');
-const { promisify } = require('util');
-const glob = promisify(require('glob'));
-const child_process = require(`child_process`);
-const { memoizeTask } = require('./memoize-task');
-const readFile = promisify(require('fs').readFile);
-const asyncDone = promisify(require('async-done'));
-const exec = promisify(require('child_process').exec);
-const parseXML = promisify(require('xml2js').parseString);
-
-const jestArgv = [];
-argv.verbose && jestArgv.push(`--verbose`);
-argv.coverage
-    ? jestArgv.push(`-c`, `jest.coverage.config.js`, `--coverage`, `-i`)
-    : jestArgv.push(`-c`, `jest.config.js`, `-i`)
-
-const jest = path.join(path.parse(require.resolve(`jest`)).dir, `../bin/jest.js`);
-const testOptions = {
-    stdio: [`ignore`, `inherit`, `inherit`],
-    env: {
-        ...process.env,
-        // hide fs.promises/stream[Symbol.asyncIterator] warnings
-        NODE_NO_WARNINGS: `1`,
-        // prevent the user-land `readable-stream` module from
-        // patching node's streams -- they're better now
-        READABLE_STREAM: `disable`
-    },
-};
-
-const testTask = ((cache, execArgv, testOptions) => memoizeTask(cache, function test(target, format) {
-    const opts = { ...testOptions };
-    const args = [...execArgv, `test/unit/`];
-    opts.env = {
-        ...opts.env,
-        TEST_TARGET: target,
-        TEST_MODULE: format,
-        TEST_DOM_STREAMS: (target === 'src' || format === 'umd').toString(),
-        TEST_NODE_STREAMS: (target === 'src' || format !== 'umd').toString(),
-        TEST_TS_SOURCE: !!argv.coverage || (target === 'src') || (opts.env.TEST_TS_SOURCE === 'true')
-    };
-    return asyncDone(() => child_process.spawn(`node`, args, opts));
-}))({}, [jest, ...jestArgv], testOptions);
-
-module.exports = testTask;
-module.exports.testTask = testTask;
-module.exports.cleanTestData = cleanTestData;
-module.exports.createTestData = createTestData;
-
-// Pull C++ and Java paths from environment vars first, otherwise sane defaults
-const ARROW_HOME = process.env.ARROW_HOME || path.resolve('../');
-const ARROW_JAVA_DIR = process.env.ARROW_JAVA_DIR || path.join(ARROW_HOME, 'java');
-const CPP_EXE_PATH = process.env.ARROW_CPP_EXE_PATH || path.join(ARROW_HOME, 'cpp/build/debug');
-const ARROW_INTEGRATION_DIR = process.env.ARROW_INTEGRATION_DIR || path.join(ARROW_HOME, 'integration');
-const CPP_JSON_TO_ARROW = path.join(CPP_EXE_PATH, 'arrow-json-integration-test');
-const CPP_STREAM_TO_FILE = path.join(CPP_EXE_PATH, 'arrow-stream-to-file');
-const CPP_FILE_TO_STREAM = path.join(CPP_EXE_PATH, 'arrow-file-to-stream');
-
-const testFilesDir = path.join(ARROW_HOME, 'js/test/data');
-const snapshotsDir = path.join(ARROW_HOME, 'js/test/__snapshots__');
-const cppFilesDir = path.join(testFilesDir, 'cpp');
-const javaFilesDir = path.join(testFilesDir, 'java');
-const jsonFilesDir = path.join(testFilesDir, 'json');
-
-async function cleanTestData() {
-    return await del([
-        `${cppFilesDir}/**`,
-        `${javaFilesDir}/**`,
-        `${jsonFilesDir}/**`,
-        `${snapshotsDir}/**`
-    ]);
-}
-
-async function createTestJSON() {
-    await mkdirp(jsonFilesDir);
-    await cpy(`${ARROW_INTEGRATION_DIR}/data/*.json`, jsonFilesDir);
-    await exec(`python3 ${ARROW_INTEGRATION_DIR}/integration_test.py --write_generated_json ${jsonFilesDir}`);
-}
-
-async function createTestData() {
-
-    let JAVA_TOOLS_JAR = process.env.ARROW_JAVA_INTEGRATION_JAR;
-    if (!JAVA_TOOLS_JAR) {
-        const pom_version = await
-            readFile(path.join(ARROW_JAVA_DIR, 'pom.xml'))
-                .then((pom) => parseXML(pom.toString()))
-                .then((pomXML) => pomXML.project.version[0]);
-        JAVA_TOOLS_JAR = path.join(ARROW_JAVA_DIR, `/tools/target/arrow-tools-${pom_version}-jar-with-dependencies.jar`);
-    }
-
-    await cleanTestData().then(createTestJSON);
-    await mkdirp(path.join(cppFilesDir, 'file'));
-    await mkdirp(path.join(javaFilesDir, 'file'));
-    await mkdirp(path.join(cppFilesDir, 'stream'));
-    await mkdirp(path.join(javaFilesDir, 'stream'));
-
-    const errors = [];
-    const names = await glob(path.join(jsonFilesDir, '*.json'));
-
-    for (let jsonPath of names) {
-        const name = path.parse(path.basename(jsonPath)).name;
-        const arrowCppFilePath = path.join(cppFilesDir, 'file', `${name}.arrow`);
-        const arrowJavaFilePath = path.join(javaFilesDir, 'file', `${name}.arrow`);
-        const arrowCppStreamPath = path.join(cppFilesDir, 'stream', `${name}.arrow`);
-        const arrowJavaStreamPath = path.join(javaFilesDir, 'stream', `${name}.arrow`);
-        try {
-            await generateCPPFile(path.resolve(jsonPath), arrowCppFilePath);
-            await generateCPPStream(arrowCppFilePath, arrowCppStreamPath);
-        } catch (e) { errors.push(`${e.stdout}\n${e.message}`); }
-        try {
-            await generateJavaFile(path.resolve(jsonPath), arrowJavaFilePath);
-            await generateJavaStream(arrowJavaFilePath, arrowJavaStreamPath);
-        } catch (e) { errors.push(`${e.stdout}\n${e.message}`); }
-    }
-    if (errors.length) {
-        console.error(errors.join(`\n`));
-        process.exit(1);
-    }
-
-    async function generateCPPFile(jsonPath, filePath) {
-        await del(filePath);
-        return await exec(
-            `${CPP_JSON_TO_ARROW} ${
-            `--integration --mode=JSON_TO_ARROW`} ${
-            `--json=${jsonPath} --arrow=${filePath}`}`,
-            { maxBuffer: Math.pow(2, 53) - 1 }
-        );
-    }
-
-    async function generateCPPStream(filePath, streamPath) {
-        await del(streamPath);
-        return await exec(
-            `${CPP_FILE_TO_STREAM} ${filePath} > ${streamPath}`,
-            { maxBuffer: Math.pow(2, 53) - 1 }
-        );
-    }
-
-    async function generateJavaFile(jsonPath, filePath) {
-        await del(filePath);
-        return await exec(
-            `java -cp ${JAVA_TOOLS_JAR} ${
-            `org.apache.arrow.tools.Integration -c JSON_TO_ARROW`} ${
-            `-j ${path.resolve(jsonPath)} -a ${filePath}`}`,
-            { maxBuffer: Math.pow(2, 53) - 1 }
-        );
-    }
-
-    async function generateJavaStream(filePath, streamPath) {
-        await del(streamPath);
-        return await exec(
-            `java -cp ${JAVA_TOOLS_JAR} ${
-            `org.apache.arrow.tools.FileToStream`} ${filePath} ${streamPath}`,
-            { maxBuffer: Math.pow(2, 53) - 1 }
-        );
-    }
-}
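
To make the environment wiring above concrete: `gulp test -t es5 -m umd` without --coverage (flags as passed by spawnGulpCommandInChildProcess in util.js) spawns jest with these variables layered over process.env:

    // TEST_TARGET       = 'es5'
    // TEST_MODULE       = 'umd'
    // TEST_DOM_STREAMS  = 'true'   // target !== 'src', but format === 'umd'
    // TEST_NODE_STREAMS = 'false'  // target !== 'src' and format === 'umd'
    // TEST_TS_SOURCE    = false    // no --coverage, target !== 'src', env flag unset
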
diff --git a/js/gulp/typescript-task.js b/js/gulp/typescript-task.js
deleted file mode 100644
index a56de42..0000000
--- a/js/gulp/typescript-task.js
+++ /dev/null
@@ -1,69 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const {
-    targetDir,
-    tsconfigName,
-    observableFromStreams,
-    shouldRunInChildProcess,
-    spawnGulpCommandInChildProcess,
-} = require('./util');
-
-const gulp = require('gulp');
-const path = require('path');
-const ts = require(`gulp-typescript`);
-const sourcemaps = require('gulp-sourcemaps');
-const { memoizeTask } = require('./memoize-task');
-const { Observable, ReplaySubject } = require('rxjs');
-
-const typescriptTask = ((cache) => memoizeTask(cache, function typescript(target, format) {
-
-    if (shouldRunInChildProcess(target, format)) {
-        return spawnGulpCommandInChildProcess('compile', target, format);
-    }
-
-    const out = targetDir(target, format);
-    const tsconfigPath = path.join(`tsconfig`, `tsconfig.${tsconfigName(target, format)}.json`);
-    return compileTypescript(out, tsconfigPath)
-        .merge(compileBinFiles(target, format)).takeLast(1)
-        .publish(new ReplaySubject()).refCount();
-}))({});
-
-function compileBinFiles(target, format) {
-    const out = targetDir(target, format);
-    const tsconfigPath = path.join(`tsconfig`, `tsconfig.${tsconfigName('bin', 'cjs')}.json`);
-    return compileTypescript(path.join(out, 'bin'), tsconfigPath, { target });
-}
-
-function compileTypescript(out, tsconfigPath, tsconfigOverrides) {
-    const tsProject = ts.createProject(tsconfigPath, { typescript: require(`typescript`), ...tsconfigOverrides });
-    const { stream: { js, dts } } = observableFromStreams(
-      tsProject.src(), sourcemaps.init(),
-      tsProject(ts.reporter.defaultReporter())
-    );
-    const writeDTypes = observableFromStreams(dts, gulp.dest(out));
-    const mapFile = tsProject.options.module === 5 ? esmMapFile : cjsMapFile;
-    const writeJS = observableFromStreams(js, sourcemaps.write('./', { mapFile }), gulp.dest(out));
-    return Observable.forkJoin(writeDTypes, writeJS);
-}
-
-function cjsMapFile(mapFilePath) { return mapFilePath; }
-function esmMapFile(mapFilePath) { return mapFilePath.replace('.js.map', '.mjs.map'); }
-
-module.exports = typescriptTask;
-module.exports.typescriptTask = typescriptTask;
-module.exports.compileBinFiles = compileBinFiles;
diff --git a/js/gulp/util.js b/js/gulp/util.js
deleted file mode 100644
index c07f5f3..0000000
--- a/js/gulp/util.js
+++ /dev/null
@@ -1,218 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const fs = require('fs');
-const path = require(`path`);
-const pump = require(`stream`).pipeline;
-const child_process = require(`child_process`);
-const { targets, modules } = require('./argv');
-const { Observable, ReplaySubject } = require('rxjs');
-const asyncDone = require('util').promisify(require('async-done'));
-
-const mainExport = `Arrow`;
-const npmPkgName = `apache-arrow`;
-const npmOrgName = `@${npmPkgName}`;
-
-const releasesRootDir = `targets`;
-const knownTargets = [`es5`, `es2015`, `esnext`];
-const knownModules = [`cjs`, `esm`, `cls`, `umd`];
-const tasksToSkipPerTargetOrFormat = {
-    src: { clean: true, build: true },
-    cls: { test: true, package: true }
-};
-const packageJSONFields = [
-  `version`, `license`, `description`,
-  `author`, `homepage`, `repository`,
-  `bugs`, `keywords`,  `dependencies`,
-  `bin`
-];
-
-const metadataFiles = [`LICENSE.txt`, `NOTICE.txt`, `README.md`].map((filename) => {
-    let err = false, prefixes = [`./`, `../`];
-    let p = prefixes.find((prefix) => {
-        try {
-            fs.statSync(path.resolve(path.join(prefix, filename)));
-        } catch (e) { return false; }
-        return true;
-    });
-    if (!p) {
-        throw new Error(`Couldn't find ${filename} in ./ or ../`);
-    }
-    return path.join(p, filename);
-});
-
-// see: https://github.com/google/closure-compiler/blob/c1372b799d94582eaf4b507a4a22558ff26c403c/src/com/google/javascript/jscomp/CompilerOptions.java#L2988
-const gCCLanguageNames = {
-    es5: `ECMASCRIPT5`,
- es2015: `ECMASCRIPT_2015`,
- es2016: `ECMASCRIPT_2016`,
- es2017: `ECMASCRIPT_2017`,
- esnext: `ECMASCRIPT_NEXT`
-};
-
-const UMDSourceTargets = {
-    es5: `es5`,
- es2015: `es2015`,
- es2016: `es2016`,
- es2017: `es2017`,
- esnext: `esnext`
-};
-
-const terserLanguageNames = {
-    es5: 5, es2015: 6,
- es2016: 7, es2017: 8,
- esnext: 8 // no separate "esnext" level in terser's ecma option; fall back to 8 (es2017)
-};
-
-// ES7+ keywords Terser shouldn't mangle
-// Hardcoded here since some are from ES7+, others are
-// only defined in interfaces, so difficult to get by reflection.
-const ESKeywords = [
-    // PropertyDescriptors
-    `configurable`, `enumerable`,
-    // IteratorResult, Symbol.asyncIterator
-    `done`, `value`, `Symbol.asyncIterator`, `asyncIterator`,
-    // AsyncObserver
-    `values`, `hasError`, `hasCompleted`,`errorValue`, `closed`,
-    // Observable/Subscription/Scheduler
-    `next`, `error`, `complete`, `subscribe`, `unsubscribe`, `isUnsubscribed`,
-    // EventTarget
-    `addListener`, `removeListener`, `addEventListener`, `removeEventListener`,
-    // Arrow properties
-    `low`, `high`, `data`, `index`, `field`, `columns`, 'numCols', 'numRows', `values`, `valueOffsets`, `nullBitmap`, `subarray`
-];
-
-function taskName(target, format) {
-    return !format ? target : `${target}:${format}`;
-}
-
-function packageName(target, format) {
-    return !format ? target : `${target}-${format}`;
-}
-
-function tsconfigName(target, format) {
-    return !format ? target : `${target}.${format}`;
-}
-
-function targetDir(target, format) {
-    return path.join(releasesRootDir, ...(!format ? [target] : [target, format]));
-}
-
-function shouldRunInChildProcess(target, format) {
-    // If we're building more than one module/target, then yes run this task in a child process
-    if (targets.length > 1 || modules.length > 1) { return true; }
-    // If the target we're building *isn't* the target the gulp command was configured to run, then yes run that in a child process
-    if (targets[0] !== target || modules[0] !== format) { return true; }
-    // Otherwise no need -- either gulp was run for just one target, or we've been spawned as the child of a multi-target parent gulp
-    return false;
-}
-
-const gulp = path.join(path.parse(require.resolve(`gulp`)).dir, `bin/gulp.js`);
-function spawnGulpCommandInChildProcess(command, target, format) {
-    const args = [gulp, command, '-t', target, '-m', format, `--silent`];
-    const opts = {
-        stdio: [`ignore`, `inherit`, `inherit`],
-        env: { ...process.env, NODE_NO_WARNINGS: `1` }
-    };
-    return asyncDone(() => child_process.spawn(`node`, args, opts))
-        .catch((e) => { throw `Error in "${command}:${taskName(target, format)}" task`; });
-}
-
-const logAndDie = (e) => { if (e) { process.exit(1); } };
-function observableFromStreams(...streams) {
-    if (streams.length <= 0) { return Observable.empty(); }
-    const pumped = streams.length <= 1 ? streams[0] : pump(...streams, logAndDie);
-    const fromEvent = Observable.fromEvent.bind(null, pumped);
-    const streamObs = fromEvent(`data`)
-               .merge(fromEvent(`error`).flatMap((e) => Observable.throw(e)))
-           .takeUntil(fromEvent(`end`).merge(fromEvent(`close`)))
-           .defaultIfEmpty(`empty stream`)
-           .multicast(new ReplaySubject()).refCount();
-    streamObs.stream = pumped;
-    streamObs.observable = streamObs;
-    return streamObs;
-}
-
-function* combinations(_targets, _modules) {
-
-    const targets = known(knownTargets, _targets || [`all`]);
-    const modules = known(knownModules, _modules || [`all`]);
-
-    if (_targets.indexOf(`src`) > -1) {
-        yield [`src`, ``];
-        return;
-    }
-
-    if (_targets.indexOf(`all`) > -1 && _modules.indexOf(`all`) > -1) {
-        yield [`ts`, ``];
-        yield [`src`, ``];
-        yield [npmPkgName, ``];
-    }
-
-    for (const format of modules) {
-        for (const target of targets) {
-            yield [target, format];
-        }
-    }
-
-    function known(known, values) {
-        return ~values.indexOf(`all`) ? known
-            :  ~values.indexOf(`src`) ? [`src`]
-            : Object.keys(
-                values.reduce((map, arg) => ((
-                    (known.indexOf(arg) !== -1) &&
-                    (map[arg.toLowerCase()] = true)
-                    || true) && map
-                ), {})
-            ).sort((a, b) => known.indexOf(a) - known.indexOf(b));
-    }
-}
-
-const publicModulePaths = (dir) => [
-    `${dir}/${mainExport}.dom.js`,
-    `${dir}/util/int.js`,
-    `${dir}/compute/predicate.js`,
-];
-
-const esmRequire = require(`esm`)(module, {
-    mode: `auto`,
-    cjs: {
-        /* A boolean for storing ES modules in require.cache. */
-        cache: true,
-        /* A boolean for respecting require.extensions in ESM. */
-        extensions: true,
-        /* A boolean for __esModule interoperability. */
-        interop: true,
-        /* A boolean for importing named exports of CJS modules. */
-        namedExports: true,
-        /* A boolean for following CJS path rules in ESM. */
-        paths: true,
-        /* A boolean for __dirname, __filename, and require in ESM. */
-        vars: true,
-    }
-});
-
-module.exports = {
-
-    mainExport, npmPkgName, npmOrgName, metadataFiles, packageJSONFields,
-
-    knownTargets, knownModules, tasksToSkipPerTargetOrFormat,
-    gCCLanguageNames, UMDSourceTargets, terserLanguageNames,
-
-    taskName, packageName, tsconfigName, targetDir, combinations, observableFromStreams,
-    ESKeywords, publicModulePaths, esmRequire, shouldRunInChildProcess, spawnGulpCommandInChildProcess
-};
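
A quick usage sketch for the combinations generator above:

    const { combinations } = require('./util');

    // An explicit pair yields exactly one combination:
    for (const [target, format] of combinations([`es5`], [`umd`])) {
        console.log(target, format); // -> es5 umd
    }

    // combinations([`all`], [`all`]) first yields [`ts`, ``], [`src`, ``],
    // [`apache-arrow`, ``], then the 3 targets x 4 modules = 12 pairs:
    // [`es5`, `cjs`], [`es2015`, `cjs`], [`esnext`, `cjs`], [`es5`, `esm`], ...
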
diff --git a/js/gulpfile.js b/js/gulpfile.js
deleted file mode 100644
index 271bd34..0000000
--- a/js/gulpfile.js
+++ /dev/null
@@ -1,102 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const del = require('del');
-const gulp = require('gulp');
-const { Observable } = require('rxjs');
-const cleanTask = require('./gulp/clean-task');
-const compileTask = require('./gulp/compile-task');
-const packageTask = require('./gulp/package-task');
-const { targets, modules } = require('./gulp/argv');
-const { testTask, createTestData, cleanTestData } = require('./gulp/test-task');
-const {
-    taskName, combinations,
-    targetDir, knownTargets,
-    npmPkgName, UMDSourceTargets,
-    tasksToSkipPerTargetOrFormat
-} = require('./gulp/util');
-
-for (const [target, format] of combinations([`all`], [`all`])) {
-    const task = taskName(target, format);
-    gulp.task(`clean:${task}`, cleanTask(target, format));
-    gulp.task(`test:${task}`,  testTask(target, format));
-    gulp.task(`compile:${task}`, compileTask(target, format));
-    gulp.task(`package:${task}`, packageTask(target, format));
-    gulp.task(`build:${task}`, gulp.series(
-        `clean:${task}`, `compile:${task}`, `package:${task}`
-    ));
-}
-
-// The UMD bundles build temporary es5/6/next targets via TS,
-// then run the TS source through either closure-compiler or
-// a minifier, so we special case that here.
-knownTargets.forEach((target) => {
-    const umd = taskName(target, `umd`);
-    const cls = taskName(UMDSourceTargets[target], `cls`);
-    gulp.task(`build:${umd}`, gulp.series(
-        `build:${cls}`,
-        `clean:${umd}`, `compile:${umd}`, `package:${umd}`,
-        function remove_closure_tmp_files() {
-            return del(targetDir(target, `cls`))
-        }
-    ));
-});
-
-// The main "apache-arrow" module builds the es2015/umd, esnext/cjs,
-// esnext/esm, and esnext/umd targets, then copies and renames the
-// compiled output into the apache-arrow folder
-gulp.task(`build:${npmPkgName}`,
-    gulp.series(
-        gulp.parallel(
-            `build:${taskName(`es2015`, `umd`)}`,
-            `build:${taskName(`esnext`, `cjs`)}`,
-            `build:${taskName(`esnext`, `esm`)}`,
-            `build:${taskName(`esnext`, `umd`)}`
-        ),
-        `clean:${npmPkgName}`,
-        `compile:${npmPkgName}`,
-        `package:${npmPkgName}`
-    )
-);
-
-// And finally the global composite tasks
-gulp.task(`clean:testdata`, cleanTestData);
-gulp.task(`create:testdata`, createTestData);
-gulp.task(`test`, gulpConcurrent(getTasks(`test`)));
-gulp.task(`clean`, gulp.parallel(getTasks(`clean`)));
-gulp.task(`build`, gulpConcurrent(getTasks(`build`)));
-gulp.task(`compile`, gulpConcurrent(getTasks(`compile`)));
-gulp.task(`package`, gulpConcurrent(getTasks(`package`)));
-gulp.task(`default`,  gulp.series(`clean`, `build`, `test`));
-
-function gulpConcurrent(tasks) {
-    const numCPUs = Math.max(1, require('os').cpus().length * 0.75) | 0;
-    return () => Observable.from(tasks.map((task) => gulp.series(task)))
-        .flatMap((task) => Observable.bindNodeCallback(task)(), numCPUs);
-}
-
-function getTasks(name) {
-    const tasks = [];
-    if (targets.indexOf(`ts`) !== -1) tasks.push(`${name}:ts`);
-    if (targets.indexOf(npmPkgName) !== -1) tasks.push(`${name}:${npmPkgName}`);
-    for (const [target, format] of combinations(targets, modules)) {
-        if (tasksToSkipPerTargetOrFormat[target] && tasksToSkipPerTargetOrFormat[target][name]) continue;
-        if (tasksToSkipPerTargetOrFormat[format] && tasksToSkipPerTargetOrFormat[format][name]) continue;
-        tasks.push(`${name}:${taskName(target, format)}`);
-    }
-    return tasks.length && tasks || [(done) => done()];
-}
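
Putting the gulpfile's pieces together, the composed series for one UMD target looks like this (comment-only trace):

    // gulp build:es5:umd expands to:
    //   build:es5:cls                      (UMDSourceTargets.es5 -> temporary closure target)
    //   clean:es5:umd -> compile:es5:umd -> package:es5:umd
    //   remove_closure_tmp_files           (del targets/es5/cls)
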
diff --git a/js/index.js b/js/index.js
deleted file mode 100644
index e42cb32..0000000
--- a/js/index.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-module.exports = require('./targets/apache-arrow');
\ No newline at end of file
diff --git a/js/index.mjs b/js/index.mjs
deleted file mode 100644
index 3043537..0000000
--- a/js/index.mjs
+++ /dev/null
@@ -1,18 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-export * from './targets/apache-arrow';
\ No newline at end of file
diff --git a/js/index.ts b/js/index.ts
deleted file mode 100644
index cfd64bb..0000000
--- a/js/index.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-export * from './src/Arrow.node';
\ No newline at end of file
diff --git a/js/jest.config.js b/js/jest.config.js
deleted file mode 100644
index 55028d0..0000000
--- a/js/jest.config.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-module.exports = {
-    "verbose": false,
-    "reporters": [
-      "jest-silent-reporter"
-    ],
-    "testEnvironment": "node",
-    "globals": {
-      "ts-jest": {
-        "diagnostics": false,
-        "tsConfig": "test/tsconfig.json"
-      }
-    },
-    "roots": [
-      "<rootDir>/test/"
-    ],
-    "moduleFileExtensions": [
-      "js",
-      "ts",
-      "tsx"
-    ],
-    "coverageReporters": [
-      "lcov"
-    ],
-    "coveragePathIgnorePatterns": [
-      "fb\\/(File|Message|Schema|Tensor)\\.(js|ts)$",
-      "test\\/.*\\.(ts|tsx|js)$",
-      "/node_modules/"
-    ],
-    "transform": {
-      "^.+\\.jsx?$": "ts-jest",
-      "^.+\\.tsx?$": "ts-jest"
-    },
-    "transformIgnorePatterns": [
-      "/node_modules/(?!web-stream-tools).+\\.js$"
-    ],
-    "testRegex": "(.*(-|\\.)(test|spec)s?)\\.(ts|tsx|js)$",
-    "preset": "ts-jest",
-    "testMatch": null
-};
diff --git a/js/jest.coverage.config.js b/js/jest.coverage.config.js
deleted file mode 100644
index ac98794..0000000
--- a/js/jest.coverage.config.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-module.exports = {
-    ...require('./jest.config'),
-    reporters: undefined,
-    coverageReporters: [
-        'lcov', 'json'
-    ],
-    globals: {
-        'ts-jest': {
-            diagnostics: false,
-            tsConfig: 'test/tsconfig.coverage.json'
-        }
-    }
-};
diff --git a/js/lerna.json b/js/lerna.json
deleted file mode 100644
index 053736e..0000000
--- a/js/lerna.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "npmClient": "yarn",
-  "packages": [
-    "targets/ts",
-    "targets/es5/*",
-    "targets/es2015/*",
-    "targets/esnext/*",
-    "targets/apache-arrow"
-  ]
-}
diff --git a/js/npm-release.sh b/js/npm-release.sh
deleted file mode 100755
index 54d9df0..0000000
--- a/js/npm-release.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-set -e
-
-# validate the targets pass all tests before publishing
-yarn --frozen-lockfile
-yarn gulp
-
-# publish the JS target modules to npm
-yarn lerna exec --no-bail -- npm publish
diff --git a/js/package.json b/js/package.json
deleted file mode 100644
index 880a5b9..0000000
--- a/js/package.json
+++ /dev/null
@@ -1,111 +0,0 @@
-{
-  "name": "apache-arrow",
-  "description": "Apache Arrow columnar in-memory format",
-  "main": "./index",
-  "bin": {
-    "arrow2csv": "bin/arrow2csv.js"
-  },
-  "scripts": {
-    "lerna": "lerna",
-    "test": "NODE_NO_WARNINGS=1 gulp test",
-    "build": "NODE_NO_WARNINGS=1 gulp build",
-    "clean": "NODE_NO_WARNINGS=1 gulp clean",
-    "debug": "NODE_NO_WARNINGS=1 gulp debug",
-    "perf": "node ./perf/index.js",
-    "test:integration": "node ./bin/integration.js --mode validate",
-    "create:perfdata": "python ./test/data/tables/generate.py ./test/data/tables/tracks.arrow",
-    "release": "./npm-release.sh",
-    "clean:all": "run-p clean clean:testdata",
-    "clean:testdata": "gulp clean:testdata",
-    "create:testdata": "gulp create:testdata",
-    "test:coverage": "gulp test -t src --coverage",
-    "doc": "del-cli ./doc && typedoc --options typedoc.js",
-    "lint": "eslint src test --fix",
-    "lint:ci": "eslint src test",
-    "prepublishOnly": "echo \"Error: do 'yarn release' instead of 'npm publish'\" && exit 1",
-    "version": "yarn && yarn clean:all"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/apache/arrow.git"
-  },
-  "keywords": [
-    "apache",
-    "arrow"
-  ],
-  "author": "Apache Software Foundation",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://issues.apache.org/jira/projects/ARROW"
-  },
-  "homepage": "https://github.com/apache/arrow/blob/master/js/README.md",
-  "files": [
-    "bin",
-    "src",
-    "gulp",
-    "test",
-    "*.json",
-    "tsconfig",
-    "README.md",
-    "gulpfile.js",
-    "npm-release.sh",
-    "jest.config.js",
-    "jest.coverage.config.js"
-  ],
-  "dependencies": {
-    "@types/flatbuffers": "^1.10.0",
-    "@types/node": "^14.14.37",
-    "@types/text-encoding-utf-8": "^1.0.1",
-    "command-line-args": "5.1.1",
-    "command-line-usage": "6.1.1",
-    "flatbuffers": "1.12.0",
-    "json-bignum": "^0.0.3",
-    "pad-left": "^2.1.0",
-    "text-encoding-utf-8": "^1.0.2",
-    "tslib": "^2.2.0"
-  },
-  "devDependencies": {
-    "@types/glob": "7.1.1",
-    "@types/jest": "26.0.22",
-    "@typescript-eslint/eslint-plugin": "^4.22.0",
-    "@typescript-eslint/parser": "^4.22.0",
-    "async-done": "1.3.1",
-    "benchmark": "2.1.4",
-    "cpy": "^8.1.2",
-    "del-cli": "3.0.1",
-    "eslint": "^7.24.0",
-    "eslint-plugin-jest": "^24.3.5",
-    "esm": "3.2.25",
-    "glob": "7.1.4",
-    "google-closure-compiler": "20210406.0.0",
-    "gulp": "4.0.2",
-    "gulp-json-transform": "0.4.6",
-    "gulp-rename": "1.4.0",
-    "gulp-sourcemaps": "2.6.5",
-    "gulp-typescript": "5.0.1",
-    "ix": "2.5.3",
-    "jest": "26.6.3",
-    "jest-silent-reporter": "0.1.2",
-    "lerna": "3.22.1",
-    "memfs": "2.15.2",
-    "mkdirp": "1.0.4",
-    "multistream": "2.1.1",
-    "npm-run-all": "4.1.5",
-    "randomatic": "3.1.1",
-    "rxjs": "5.5.11",
-    "source-map-loader": "0.2.4",
-    "terser-webpack-plugin": "4.2.2",
-    "ts-jest": "26.5.4",
-    "ts-node": "9.1.1",
-    "typedoc": "0.20.35",
-    "typescript": "4.0.2",
-    "web-stream-tools": "0.0.1",
-    "web-streams-polyfill": "3.0.3",
-    "webpack": "4.29.0",
-    "xml2js": "0.4.19"
-  },
-  "engines": {
-    "node": ">=11.12"
-  },
-  "version": "4.0.0-SNAPSHOT"
-}
diff --git a/js/perf/config.js b/js/perf/config.js
deleted file mode 100644
index cca1080..0000000
--- a/js/perf/config.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const fs = require('fs');
-const path = require('path');
-const glob = require('glob');
-
-const config = [];
-const filenames = glob.sync(path.resolve(__dirname, `../test/data/cpp/stream`, `*.arrow`));
-
-for (const filename of filenames) {
-    const { name } = path.parse(filename);
-    config.push({ name, buffers: [fs.readFileSync(filename)] });
-}
-
-module.exports = config;
diff --git a/js/perf/index.js b/js/perf/index.js
deleted file mode 100644
index 7535c9f..0000000
--- a/js/perf/index.js
+++ /dev/null
@@ -1,248 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// Use the ES5 UMD target as perf baseline
-// const { predicate, Table, RecordBatchReader } = require('../targets/es5/umd');
-// const { predicate, Table, RecordBatchReader } = require('../targets/es5/cjs');
-// const { predicate, Table, RecordBatchReader } = require('../targets/es2015/umd');
-const { predicate, Table, RecordBatchReader } = require('../targets/es2015/cjs');
-const { col } = predicate;
-
-const Benchmark = require('benchmark');
-
-const suites = [];
-
-for (let { name, buffers } of require('./table_config')) {
-    const parseSuiteName = `Parse "${name}"`;
-    const sliceSuiteName = `Slice "${name}" vectors`;
-    const iterateSuiteName = `Iterate "${name}" vectors`;
-    const getByIndexSuiteName = `Get "${name}" values by index`;
-    const sliceToArraySuiteName = `Slice toArray "${name}" vectors`;
-    suites.push(createTestSuite(parseSuiteName, createFromTableTest(name, buffers)));
-    suites.push(createTestSuite(parseSuiteName, createReadBatchesTest(name, buffers)));
-    const table = Table.from(buffers), schema = table.schema;
-    suites.push(...schema.fields.map((f, i) => createTestSuite(getByIndexSuiteName, createGetByIndexTest(table.getColumnAt(i), f.name))));
-    suites.push(...schema.fields.map((f, i) => createTestSuite(iterateSuiteName, createIterateTest(table.getColumnAt(i), f.name))));
-    suites.push(...schema.fields.map((f, i) => createTestSuite(sliceToArraySuiteName, createSliceToArrayTest(table.getColumnAt(i), f.name))));
-    suites.push(...schema.fields.map((f, i) => createTestSuite(sliceSuiteName, createSliceTest(table.getColumnAt(i), f.name))));
-}
-
-for (let {name, buffers, countBys, counts} of require('./table_config')) {
-    const table = Table.from(buffers);
-
-    const tableIterateSuiteName = `Table Iterate "${name}"`;
-    const dfCountBySuiteName = `DataFrame Count By "${name}"`;
-    const dfFilterCountSuiteName = `DataFrame Filter-Scan Count "${name}"`;
-    const dfDirectCountSuiteName = `DataFrame Direct Count "${name}"`;
-    const dfFilterIterSuiteName = `DataFrame Filter-Iterate "${name}"`;
-
-    suites.push(createTestSuite(tableIterateSuiteName, createTableIterateTest(table)));
-    suites.push(...countBys.map((countBy) => createTestSuite(dfCountBySuiteName, createDataFrameCountByTest(table, countBy))));
-    suites.push(...counts.map(({ col, test, value }) => createTestSuite(dfFilterCountSuiteName, createDataFrameFilterCountTest(table, col, test, value))));
-    suites.push(...counts.map(({ col, test, value }) => createTestSuite(dfDirectCountSuiteName, createDataFrameDirectCountTest(table, col, test, value))));
-    suites.push(...counts.map(({ col, test, value }) => createTestSuite(dfFilterIterSuiteName, createDataFrameFilterIterateTest(table, col, test, value))));
-}
-
-console.log('Running apache-arrow performance tests...\n');
-
-run();
-
-function run() {
-    const suite = suites.shift();
-    suite && suite.on('complete', function() {
-        console.log(suite.name + ':\n' + this.map(function(x) {
-            const str = x.toString();
-            const meanMsPerOp = Math.round(x.stats.mean * 100000)/100; // stats.mean is in seconds; ms with 2 decimal places
-            const sliceOf60FPS = Math.round((meanMsPerOp / (1000/60)) * 100000)/1000; // % of one 60FPS frame (16.67ms), 3 decimal places
-            return `${str}\n   avg: ${meanMsPerOp}ms\n   ${sliceOf60FPS}% of a frame @ 60FPS ${x.suffix || ''}`;
-        }).join('\n') + '\n');
-        if (suites.length > 0) {
-            setTimeout(run, 1000);
-        }
-    })
-    .run({ async: true });
-}
-
-function createTestSuite(name, test) {
-    return new Benchmark.Suite(name, { async: true }).add(test);
-}
-
-function createFromTableTest(name, buffers) {
-    let table;
-    return {
-        async: true,
-        name: `Table.from\n`,
-        fn() { table = Table.from(buffers); }
-    };
-}
-
-function createReadBatchesTest(name, buffers) {
-    let recordBatch;
-    return {
-        async: true,
-        name: `readBatches\n`,
-        fn() { for (recordBatch of RecordBatchReader.from(buffers)) {} }
-    };
-}
-
-function createSliceTest(vector, name) {
-    let xs;
-    return {
-        async: true,
-        name: `name: '${name}', length: ${vector.length}, type: ${vector.type}\n`,
-        fn() { xs = vector.slice(); }
-    };
-}
-
-function createSliceToArrayTest(vector, name) {
-    let xs;
-    return {
-        async: true,
-        name: `name: '${name}', length: ${vector.length}, type: ${vector.type}\n`,
-        fn() { xs = vector.slice().toArray(); }
-    };
-}
-
-function createIterateTest(vector, name) {
-    let value;
-    return {
-        async: true,
-        name: `name: '${name}', length: ${vector.length}, type: ${vector.type}\n`,
-        fn() { for (value of vector) {} }
-    };
-}
-
-function createGetByIndexTest(vector, name) {
-    let value;
-    return {
-        async: true,
-        name: `name: '${name}', length: ${vector.length}, type: ${vector.type}\n`,
-        fn() {
-            for (let i = -1, n = vector.length; ++i < n;) {
-                value = vector.get(i);
-            }
-        }
-    };
-}
-
-function createTableIterateTest(table) {
-    let value;
-    return {
-        async: true,
-        name: `length: ${table.length}\n`,
-        fn() { for (value of table) {} }
-    };
-}
-
-function createDataFrameDirectCountTest(table, column, test, value) {
-    let sum;
-    const colidx = table.schema.fields.findIndex((c) => c.name === column);
-
-    // strict '>' so the direct count agrees with the `col(column).gt(value)` predicate
-    const compare =
-        test === 'gt' ? (x) => x > value :
-        test === 'eq' ? (x) => x === value :
-        null;
-
-    if (compare === null) {
-        throw new Error(`Unrecognized test "${test}"`);
-    }
-
-    const op = () => {
-        sum = 0;
-        const batches = table.chunks;
-        const numBatches = batches.length;
-        for (let batchIndex = -1; ++batchIndex < numBatches;) {
-            // load each batch and the column's child vector
-            const batch = batches[batchIndex];
-            const vector = batch.getChildAt(colidx);
-            // visit every index, coercing each boolean comparison to 0/1
-            for (let index = -1, length = batch.length; ++index < length;) {
-                sum += compare(vector.get(index));
-            }
-        }
-        return sum;
-    };
-
-    return {
-        async: true,
-        name: `name: '${column}', length: ${table.length}, type: ${table.getColumnAt(colidx).type}, test: ${test}, value: ${value}\n`,
-        fn: op
-    };
-}
-
-function createDataFrameCountByTest(table, column) {
-    const colidx = table.schema.fields.findIndex((c) => c.name === column);
-
-    return {
-        async: true,
-        name: `name: '${column}', length: ${table.length}, type: ${table.getColumnAt(colidx).type}\n`,
-        fn() {
-            table.countBy(column);
-        }
-    };
-}
-
-function createDataFrameFilterCountTest(table, column, test, value) {
-    const colidx = table.schema.fields.findIndex((c) => c.name === column);
-    let df;
-
-    if (test === 'gt') {
-        df = table.filter(col(column).gt(value));
-    } else if (test === 'eq') {
-        df = table.filter(col(column).eq(value));
-    } else {
-        throw new Error(`Unrecognized test "${test}"`);
-    }
-
-    return {
-        async: true,
-        name: `name: '${column}', length: ${table.length}, type: ${table.getColumnAt(colidx).type}, test: ${test}, value: ${value}\n`,
-        fn() {
-            df.count();
-        }
-    };
-}
-
-function createDataFrameFilterIterateTest(table, column, test, value) {
-    const colidx = table.schema.fields.findIndex((c) => c.name === column);
-    let df;
-
-    if (test === 'gt') {
-        df = table.filter(col(column).gt(value));
-    } else if (test === 'eq') {
-        df = table.filter(col(column).eq(value));
-    } else {
-        throw new Error(`Unrecognized test "${test}"`);
-    }
-
-    return {
-        async: true,
-        name: `name: '${column}', length: ${table.length}, type: ${table.getColumnAt(colidx).type}, test: ${test}, value: ${value}\n`,
-        fn() { for (value of df) {} }
-    };
-}
-
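For reference, a minimal sketch of how the factory functions above plug into
Benchmark.js (Suite/add/run are the Benchmark.js API; `table` is assumed to be
an already-loaded Arrow Table):

    const Benchmark = require('benchmark');

    // each factory returns a { async, name, fn } spec that Suite#add accepts
    const suite = new Benchmark.Suite('iterate', { async: true });
    suite
        .add(createTableIterateTest(table))
        .on('cycle', (event) => console.log(String(event.target)))
        .run({ async: true });
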
diff --git a/js/perf/table_config.js b/js/perf/table_config.js
deleted file mode 100644
index 2946b5a..0000000
--- a/js/perf/table_config.js
+++ /dev/null
@@ -1,48 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-const fs = require('fs');
-const path = require('path');
-const glob = require('glob');
-
-const config = [];
-const filenames = glob.sync(path.resolve(__dirname, `../test/data/tables/`, `*.arrow`));
-
-const countBys = {
-    tracks: ['origin', 'destination']
-}
-const counts = {
-    tracks: [
-        {col: 'lat',    test: 'gt', value: 0        },
-        {col: 'lng',    test: 'gt', value: 0        },
-        {col: 'origin', test: 'eq', value: 'Seattle'},
-    ]
-}
-
-for (const filename of filenames) {
-    const { name } = path.parse(filename);
-    if (name in counts) {
-        config.push({
-            name,
-            buffers: [fs.readFileSync(filename)],
-            countBys: countBys[name],
-            counts: counts[name],
-        });
-    }
-}
-
-module.exports = config;
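
A sketch of how one of these config entries is consumed downstream (assuming
the perf runner above and the `apache-arrow` package; `Table.from` is the same
call exercised by createFromTableTest):

    const { Table } = require('apache-arrow');
    const config = require('./table_config');

    for (const { name, buffers } of config) {
        // each entry's buffers hold one serialized Arrow file
        const table = Table.from(buffers);
        console.log(`${name}: ${table.length} rows`);
    }
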
diff --git a/js/src/Arrow.dom.ts b/js/src/Arrow.dom.ts
deleted file mode 100644
index 3872979..0000000
--- a/js/src/Arrow.dom.ts
+++ /dev/null
@@ -1,112 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import streamAdapters from './io/adapters';
-import { Builder } from './builder/index';
-import { RecordBatchReader, RecordBatchFileReader, RecordBatchStreamReader, } from './ipc/reader';
-import { RecordBatchWriter, RecordBatchFileWriter, RecordBatchStreamWriter, } from './ipc/writer';
-import { toDOMStream } from './io/whatwg/iterable';
-import { builderThroughDOMStream } from './io/whatwg/builder';
-import { recordBatchReaderThroughDOMStream } from './io/whatwg/reader';
-import { recordBatchWriterThroughDOMStream } from './io/whatwg/writer';
-
-streamAdapters.toDOMStream = toDOMStream;
-Builder['throughDOM'] = builderThroughDOMStream;
-RecordBatchReader['throughDOM'] = recordBatchReaderThroughDOMStream;
-RecordBatchFileReader['throughDOM'] = recordBatchReaderThroughDOMStream;
-RecordBatchStreamReader['throughDOM'] = recordBatchReaderThroughDOMStream;
-RecordBatchWriter['throughDOM'] = recordBatchWriterThroughDOMStream;
-RecordBatchFileWriter['throughDOM'] = recordBatchWriterThroughDOMStream;
-RecordBatchStreamWriter['throughDOM'] = recordBatchWriterThroughDOMStream;
-
-export {
-    DateUnit, IntervalUnit, MessageHeader, MetadataVersion, Precision, TimeUnit, Type, UnionMode, BufferType,
-    Data,
-    DataType,
-    Null,
-    Bool,
-    Int, Int8, Int16, Int32, Int64, Uint8, Uint16, Uint32, Uint64,
-    Float, Float16, Float32, Float64,
-    Utf8,
-    Binary,
-    FixedSizeBinary,
-    Date_, DateDay, DateMillisecond,
-    Timestamp, TimestampSecond, TimestampMillisecond, TimestampMicrosecond, TimestampNanosecond,
-    Time, TimeSecond, TimeMillisecond, TimeMicrosecond, TimeNanosecond,
-    Decimal,
-    List,
-    Struct,
-    Union, DenseUnion, SparseUnion,
-    Dictionary,
-    Interval, IntervalDayTime, IntervalYearMonth,
-    FixedSizeList,
-    Map_,
-    Table,
-    Column,
-    Schema, Field,
-    Visitor,
-    Vector,
-    BaseVector,
-    BinaryVector,
-    BoolVector,
-    Chunked,
-    DateVector, DateDayVector, DateMillisecondVector,
-    DecimalVector,
-    DictionaryVector,
-    FixedSizeBinaryVector,
-    FixedSizeListVector,
-    FloatVector, Float16Vector, Float32Vector, Float64Vector,
-    IntervalVector, IntervalDayTimeVector, IntervalYearMonthVector,
-    IntVector, Int8Vector, Int16Vector, Int32Vector, Int64Vector, Uint8Vector, Uint16Vector, Uint32Vector, Uint64Vector,
-    ListVector,
-    MapVector,
-    NullVector,
-    StructVector,
-    TimestampVector, TimestampSecondVector, TimestampMillisecondVector, TimestampMicrosecondVector, TimestampNanosecondVector,
-    TimeVector, TimeSecondVector, TimeMillisecondVector, TimeMicrosecondVector, TimeNanosecondVector,
-    UnionVector, DenseUnionVector, SparseUnionVector,
-    Utf8Vector,
-    ByteStream, AsyncByteStream, AsyncByteQueue, ReadableSource, WritableSink,
-    RecordBatchReader, RecordBatchFileReader, RecordBatchStreamReader, AsyncRecordBatchFileReader, AsyncRecordBatchStreamReader,
-    RecordBatchWriter, RecordBatchFileWriter, RecordBatchStreamWriter, RecordBatchJSONWriter,
-    MessageReader, AsyncMessageReader, JSONMessageReader,
-    Message,
-    RecordBatch,
-    ArrowJSONLike, FileHandle, Readable, Writable, ReadableWritable, ReadableDOMStreamOptions,
-    DataFrame, FilteredDataFrame, CountByResult, BindFunc, NextFunc,
-    predicate,
-    util,
-    Builder,
-    BinaryBuilder,
-    BoolBuilder,
-    DateBuilder, DateDayBuilder, DateMillisecondBuilder,
-    DecimalBuilder,
-    DictionaryBuilder,
-    FixedSizeBinaryBuilder,
-    FixedSizeListBuilder,
-    FloatBuilder, Float16Builder, Float32Builder, Float64Builder,
-    IntervalBuilder, IntervalDayTimeBuilder, IntervalYearMonthBuilder,
-    IntBuilder, Int8Builder, Int16Builder, Int32Builder, Int64Builder, Uint8Builder, Uint16Builder, Uint32Builder, Uint64Builder,
-    ListBuilder,
-    MapBuilder,
-    NullBuilder,
-    StructBuilder,
-    TimestampBuilder, TimestampSecondBuilder, TimestampMillisecondBuilder, TimestampMicrosecondBuilder, TimestampNanosecondBuilder,
-    TimeBuilder, TimeSecondBuilder, TimeMillisecondBuilder, TimeMicrosecondBuilder, TimeNanosecondBuilder,
-    UnionBuilder, DenseUnionBuilder, SparseUnionBuilder,
-    Utf8Builder,
-} from './Arrow';
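
Since this module wires up the `throughDOM` statics, a minimal sketch of using
them with WHATWG streams (the `/data.arrow` URL is a stand-in; `pipeThrough`
and `getReader` are the standard DOM stream APIs):

    import { RecordBatch, RecordBatchReader } from 'apache-arrow';

    async function logBatches(url: string) {
        const response = await fetch(url);
        // throughDOM() yields a { writable, readable } pair usable with pipeThrough
        const batches = response.body!.pipeThrough(RecordBatchReader.throughDOM());
        const reader = batches.getReader();
        for (let res = await reader.read(); !res.done; res = await reader.read()) {
            const batch: RecordBatch = res.value;
            console.log(batch.length);
        }
    }
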
diff --git a/js/src/Arrow.node.ts b/js/src/Arrow.node.ts
deleted file mode 100644
index 44221f6..0000000
--- a/js/src/Arrow.node.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import streamAdapters from './io/adapters';
-import { Builder } from './builder/index';
-import { RecordBatchReader } from './ipc/reader';
-import { RecordBatchWriter } from './ipc/writer';
-import { toNodeStream } from './io/node/iterable';
-import { builderThroughNodeStream } from './io/node/builder';
-import { recordBatchReaderThroughNodeStream } from './io/node/reader';
-import { recordBatchWriterThroughNodeStream } from './io/node/writer';
-
-streamAdapters.toNodeStream = toNodeStream;
-Builder['throughNode'] = builderThroughNodeStream;
-RecordBatchReader['throughNode'] = recordBatchReaderThroughNodeStream;
-RecordBatchWriter['throughNode'] = recordBatchWriterThroughNodeStream;
-
-export * from './Arrow.dom';
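
And the Node counterpart, a sketch of the `throughNode` statics this module
attaches (assuming 'data.arrow' is an Arrow IPC file on disk, and that
throughNode() returns an object-mode Duplex emitting RecordBatches):

    import * as fs from 'fs';
    import { RecordBatch, RecordBatchReader } from 'apache-arrow';

    fs.createReadStream('data.arrow')
        .pipe(RecordBatchReader.throughNode())
        .on('data', (batch: RecordBatch) => console.log(batch.numCols, batch.length))
        .on('end', () => console.log('done'));
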
diff --git a/js/src/Arrow.ts b/js/src/Arrow.ts
deleted file mode 100644
index 41408c6..0000000
--- a/js/src/Arrow.ts
+++ /dev/null
@@ -1,134 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-export {
-    DateUnit,
-    TimeUnit,
-    Precision,
-    UnionMode,
-    IntervalUnit,
-    MetadataVersion,
-} from './fb/Schema';
-
-export { MessageHeader } from './fb/Message';
-
-export { Type, BufferType } from './enum';
-
-export { Data } from './data';
-export {
-    DataType,
-    Null,
-    Bool,
-    Int, Int8, Int16, Int32, Int64, Uint8, Uint16, Uint32, Uint64,
-    Float, Float16, Float32, Float64,
-    Utf8,
-    Binary,
-    FixedSizeBinary,
-    Date_, DateDay, DateMillisecond,
-    Timestamp, TimestampSecond, TimestampMillisecond, TimestampMicrosecond, TimestampNanosecond,
-    Time, TimeSecond, TimeMillisecond, TimeMicrosecond, TimeNanosecond,
-    Decimal,
-    List,
-    Struct,
-    Union, DenseUnion, SparseUnion,
-    Dictionary,
-    Interval, IntervalDayTime, IntervalYearMonth,
-    FixedSizeList,
-    Map_,
-} from './type';
-
-export { Table } from './table';
-export { Column } from './column';
-export { Visitor } from './visitor';
-export { Schema, Field } from './schema';
-export {
-    Vector,
-    BaseVector,
-    BinaryVector,
-    BoolVector,
-    Chunked,
-    DateVector, DateDayVector, DateMillisecondVector,
-    DecimalVector,
-    DictionaryVector,
-    FixedSizeBinaryVector,
-    FixedSizeListVector,
-    FloatVector, Float16Vector, Float32Vector, Float64Vector,
-    IntervalVector, IntervalDayTimeVector, IntervalYearMonthVector,
-    IntVector, Int8Vector, Int16Vector, Int32Vector, Int64Vector, Uint8Vector, Uint16Vector, Uint32Vector, Uint64Vector,
-    ListVector,
-    MapVector,
-    NullVector,
-    StructVector,
-    TimestampVector, TimestampSecondVector, TimestampMillisecondVector, TimestampMicrosecondVector, TimestampNanosecondVector,
-    TimeVector, TimeSecondVector, TimeMillisecondVector, TimeMicrosecondVector, TimeNanosecondVector,
-    UnionVector, DenseUnionVector, SparseUnionVector,
-    Utf8Vector,
-} from './vector/index';
-
-export {
-    Builder,
-    BinaryBuilder,
-    BoolBuilder,
-    DateBuilder, DateDayBuilder, DateMillisecondBuilder,
-    DecimalBuilder,
-    DictionaryBuilder,
-    FixedSizeBinaryBuilder,
-    FixedSizeListBuilder,
-    FloatBuilder, Float16Builder, Float32Builder, Float64Builder,
-    IntervalBuilder, IntervalDayTimeBuilder, IntervalYearMonthBuilder,
-    IntBuilder, Int8Builder, Int16Builder, Int32Builder, Int64Builder, Uint8Builder, Uint16Builder, Uint32Builder, Uint64Builder,
-    ListBuilder,
-    MapBuilder,
-    NullBuilder,
-    StructBuilder,
-    TimestampBuilder, TimestampSecondBuilder, TimestampMillisecondBuilder, TimestampMicrosecondBuilder, TimestampNanosecondBuilder,
-    TimeBuilder, TimeSecondBuilder, TimeMillisecondBuilder, TimeMicrosecondBuilder, TimeNanosecondBuilder,
-    UnionBuilder, DenseUnionBuilder, SparseUnionBuilder,
-    Utf8Builder,
-} from './builder/index';
-
-export { ByteStream, AsyncByteStream, AsyncByteQueue, ReadableSource, WritableSink } from './io/stream';
-export { RecordBatchReader, RecordBatchFileReader, RecordBatchStreamReader, AsyncRecordBatchFileReader, AsyncRecordBatchStreamReader } from './ipc/reader';
-export { RecordBatchWriter, RecordBatchFileWriter, RecordBatchStreamWriter, RecordBatchJSONWriter } from './ipc/writer';
-export { MessageReader, AsyncMessageReader, JSONMessageReader } from './ipc/message';
-export { Message } from './ipc/metadata/message';
-export { RecordBatch } from './recordbatch';
-export { ArrowJSONLike, FileHandle, Readable, Writable, ReadableWritable, ReadableDOMStreamOptions } from './io/interfaces';
-export { DataFrame, FilteredDataFrame, CountByResult, BindFunc, NextFunc } from './compute/dataframe';
-
-import * as util_bn_ from './util/bn';
-import * as util_int_ from './util/int';
-import * as util_bit_ from './util/bit';
-import * as util_math_ from './util/math';
-import * as util_buffer_ from './util/buffer';
-import * as util_vector_ from './util/vector';
-import * as predicate from './compute/predicate';
-import { compareSchemas, compareFields, compareTypes } from './visitor/typecomparator';
-
-export { predicate };
-/** @ignore */
-export const util = {
-    ...util_bn_,
-    ...util_int_,
-    ...util_bit_,
-    ...util_math_,
-    ...util_buffer_,
-    ...util_vector_,
-    compareSchemas,
-    compareFields,
-    compareTypes,
-};
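
A short sketch tying these exports together, mirroring the calls the perf
suite above makes (Table.from, predicate.col, filter, count; `buffers` is
assumed to hold a serialized Arrow table with a numeric 'lat' column):

    import { Table, predicate } from 'apache-arrow';

    declare const buffers: Uint8Array;   // assumed input

    const table = Table.from(buffers);
    const df = table.filter(predicate.col('lat').gt(0));
    console.log(df.count());             // number of rows with lat > 0
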
diff --git a/js/src/bin/arrow2csv.ts b/js/src/bin/arrow2csv.ts
deleted file mode 100644
index 064b6ee..0000000
--- a/js/src/bin/arrow2csv.ts
+++ /dev/null
@@ -1,334 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import * as fs from 'fs';
-import * as stream from 'stream';
-import { valueToString } from '../util/pretty';
-import { Schema, RecordBatch, RecordBatchReader, AsyncByteQueue } from '../Arrow.node';
-
-/* eslint-disable @typescript-eslint/no-require-imports */
-
-const padLeft = require('pad-left');
-const bignumJSONParse = require('json-bignum').parse;
-const argv = require(`command-line-args`)(cliOpts(), { partial: true });
-const files = argv.help ? [] : [...(argv.file || []), ...(argv._unknown || [])].filter(Boolean);
-
-const state = { ...argv, closed: false, maxColWidths: [10] };
-
-type ToStringState = {
-    hr: string;
-    sep: string;
-    schema: any;
-    closed: boolean;
-    metadata: boolean;
-    maxColWidths: number[];
-};
-
-(async () => {
-
-    const sources = argv.help ? [] : [
-        ...files.map((file) => () => fs.createReadStream(file)),
-        ...(process.stdin.isTTY ? [] : [() => process.stdin])
-    ].filter(Boolean) as (() => NodeJS.ReadableStream)[];
-
-    let reader: RecordBatchReader | null;
-    let hasReaders = false;
-
-    for (const source of sources) {
-        if (state.closed) { break; }
-        for await (reader of recordBatchReaders(source)) {
-            hasReaders = true;
-            const transformToString = batchesToString(state, reader.schema);
-            await pipeTo(
-                reader.pipe(transformToString),
-                process.stdout, { end: false }
-            ).catch(() => state.closed = true); // Handle EPIPE errors
-        }
-        if (state.closed) { break; }
-    }
-
-    return hasReaders ? 0 : print_usage();
-})()
-.then((x) => +x || 0, (err) => {
-    if (err) {
-        console.error(`${err && err.stack || err}`);
-    }
-    return process.exitCode || 1;
-}).then((code) => process.exit(code));
-
-function pipeTo(source: NodeJS.ReadableStream, sink: NodeJS.WritableStream, opts?: { end: boolean }) {
-    return new Promise((resolve, reject) => {
-
-        source.on('end', onEnd).pipe(sink, opts).on('error', onErr);
-
-        function onEnd() { done(undefined, resolve); }
-        function onErr(err: any) { done(err, reject); }
-        function done(e: any, cb: (e?: any) => void) {
-            source.removeListener('end', onEnd);
-            sink.removeListener('error', onErr);
-            cb(e);
-        }
-    });
-}
-
-async function *recordBatchReaders(createSourceStream: () => NodeJS.ReadableStream) {
-
-    const json = new AsyncByteQueue();
-    const stream = new AsyncByteQueue();
-    const source = createSourceStream();
-    let reader: RecordBatchReader | null = null;
-    // tee the input source, just in case it's Arrow JSON rather than an IPC stream
-    source.on('end', () => [stream, json].forEach((y) => y.close()))
-        .on('data', (x) => [stream, json].forEach((y) => y.write(x)))
-        .on('error', (e) => [stream, json].forEach((y) => y.abort(e)));
-
-    try {
-        for await (reader of RecordBatchReader.readAll(stream)) {
-            reader && (yield reader);
-        }
-        if (reader) return;
-    } catch (e) { /* not a valid IPC stream; fall through and try Arrow JSON */ }
-
-    await json.closed;
-    if (source instanceof fs.ReadStream) { source.close(); }
-    // If the data in the `json` ByteQueue parses as JSON, assume it's Arrow JSON from a file or stdin
-    try {
-        for await (reader of RecordBatchReader.readAll(bignumJSONParse(await json.toString()))) {
-            reader && (yield reader);
-        }
-    } catch (e) { /* neither valid IPC nor Arrow JSON; yield nothing */ }
-}
-
-function batchesToString(state: ToStringState, schema: Schema) {
-
-    let rowId = 0;
-    let batchId = -1;
-    let maxColWidths = [10];
-    const { hr, sep } = state;
-
-    const header = ['row_id', ...schema.fields.map((f) => `${f}`)].map(valueToString);
-
-    state.maxColWidths = header.map((x, i) => Math.max(maxColWidths[i] || 0, x.length));
-
-    return new stream.Transform({
-        encoding: 'utf8',
-        writableObjectMode: true,
-        readableObjectMode: false,
-        final(cb: (error?: Error | null) => void) {
-            // if there were no batches, print just the header row and any schema metadata
-            if (batchId === -1) {
-                hr && this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n\n`);
-                this.push(`${formatRow(header, maxColWidths, sep)}\n`);
-                if (state.metadata && schema.metadata.size > 0) {
-                    this.push(`metadata:\n${formatMetadata(schema.metadata)}\n`);
-                }
-            }
-            hr && this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n\n`);
-            cb();
-        },
-        transform(batch: RecordBatch, _enc: string, cb: (error?: Error, data?: any) => void) {
-
-            batch = !(state.schema && state.schema.length) ? batch : batch.select(...state.schema);
-
-            if (state.closed) { return cb(undefined, null); }
-
-            // Pass one to convert to strings and count max column widths
-            state.maxColWidths = measureColumnWidths(rowId, batch, header.map((x, i) => Math.max(maxColWidths[i] || 0, x.length)));
-
-            // If this is the first batch in a stream, print a top horizontal rule, any schema metadata, and the header row
-            if (++batchId === 0) {
-                hr && this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n`);
-                if (state.metadata && batch.schema.metadata.size > 0) {
-                    this.push(`metadata:\n${formatMetadata(batch.schema.metadata)}\n`);
-                    hr && this.push(`${horizontalRule(state.maxColWidths, hr, sep)}\n`);
-                }
-                if (batch.length <= 0 || batch.numCols <= 0) {
-                    this.push(`${formatRow(header, maxColWidths = state.maxColWidths, sep)}\n`);
-                }
-            }
-
-            if (batch.length > 0 && batch.numCols > 0) {
-                // If any of the column widths changed, print the header again
-                if (rowId % 350 !== 0 && JSON.stringify(state.maxColWidths) !== JSON.stringify(maxColWidths)) {
-                    this.push(`${formatRow(header, state.maxColWidths, sep)}\n`);
-                }
-                maxColWidths = state.maxColWidths;
-                for (const row of batch) {
-                    if (state.closed) { break; } else if (!row) { continue; }
-                    if (rowId++ % 350 === 0) {
-                        this.push(`${formatRow(header, maxColWidths, sep)}\n`);
-                    }
-                    this.push(`${formatRow([rowId, ...row.toArray()].map(valueToString), maxColWidths, sep)}\n`);
-                }
-            }
-            cb();
-        }
-    });
-}
-
-function horizontalRule(maxColWidths: number[], hr = '', sep = ' | ') {
-    return ` ${padLeft('', maxColWidths.reduce((x, y) => x + y, -2 + maxColWidths.length * sep.length), hr)}`;
-}
-
-function formatRow(row: string[] = [], maxColWidths: number[] = [], sep = ' | ') {
-    return `${row.map((x, j) => padLeft(x, maxColWidths[j])).join(sep)}`;
-}
-
-function formatMetadata(metadata: Map<string, string>) {
-
-    return [...metadata].map(([key, val]) =>
-        `  ${key}: ${formatMetadataValue(val)}`
-    ).join(',  \n');
-
-    function formatMetadataValue(value = '') {
-        let parsed = value;
-        try {
-            parsed = JSON.stringify(JSON.parse(value), null, 2);
-        } catch (e) { parsed = value; }
-        return valueToString(parsed).split('\n').join('\n  ');
-    }
-}
-
-function measureColumnWidths(rowId: number, batch: RecordBatch, maxColWidths: number[] = []) {
-    let val: any, j = 0;
-    for (const row of batch) {
-        if (!row) { continue; }
-        maxColWidths[j = 0] = Math.max(maxColWidths[0] || 0, (`${rowId++}`).length);
-        for (val of row) {
-            if (val && typedArrayElementWidths.has(val.constructor) && (typeof val[Symbol.toPrimitive] !== 'function')) {
-                // If we're printing a column of TypedArrays, ensure the column is wide enough to accommodate
-                // the widest possible element for a given byte size, since JS omits leading zeroes. For example:
-                // 1 |  [1137743649,2170567488,244696391,2122556476]
-                // 2 |                                          null
-                // 3 |   [637174007,2142281880,961736230,2912449282]
-                // 4 |    [1035112265,21832886,412842672,2207710517]
-                // 5 |                                          null
-                // 6 |                                          null
-                // 7 |     [2755142991,4192423256,2994359,467878370]
-                const elementWidth = typedArrayElementWidths.get(val.constructor)!;
-
-                maxColWidths[j + 1] = Math.max(maxColWidths[j + 1] || 0,
-                    2 + // brackets on each end
-                    (val.length - 1) + // commas between elements
-                    (val.length * elementWidth) // width of stringified 2^N-1
-                );
-            } else {
-                maxColWidths[j + 1] = Math.max(maxColWidths[j + 1] || 0, valueToString(val).length);
-            }
-            ++j;
-        }
-    }
-    return maxColWidths;
-}
-
-// Measure the stringified representation of 2^N-1 for each TypedArray variant
-const typedArrayElementWidths = (() => {
-    const maxElementWidth = (ArrayType: any) => {
-        const octets = Array.from({ length: ArrayType.BYTES_PER_ELEMENT - 1 }, _ => 255);
-        return `${new ArrayType(new Uint8Array([...octets, 254]).buffer)[0]}`.length;
-    };
-    return new Map<any, number>([
-        [Int8Array, maxElementWidth(Int8Array)],
-        [Int16Array, maxElementWidth(Int16Array)],
-        [Int32Array, maxElementWidth(Int32Array)],
-        [Uint8Array, maxElementWidth(Uint8Array)],
-        [Uint16Array, maxElementWidth(Uint16Array)],
-        [Uint32Array, maxElementWidth(Uint32Array)],
-        [Float32Array, maxElementWidth(Float32Array)],
-        [Float64Array, maxElementWidth(Float64Array)],
-        [Uint8ClampedArray, maxElementWidth(Uint8ClampedArray)]
-    ]);
-})();
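
A quick worked check of what the helper above computes, e.g. for Int32Array on
a little-endian platform:

    // octets = [255, 255, 255]; the buffer bytes are [255, 255, 255, 254]
    // little-endian, so 254 is the most significant byte: 0xFEFFFFFF
    // new Int32Array(...)[0] === -16777217, and '-16777217'.length === 9,
    // so Int32 columns are sized to fit 9-character elements
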
-
-function cliOpts() {
-    return [
-        {
-            type: String,
-            name: 'schema', alias: 's',
-            optional: true, multiple: true,
-            typeLabel: '{underline columns}',
-            description: 'A space-delimited list of column names'
-        },
-        {
-            type: String,
-            name: 'file', alias: 'f',
-            optional: true, multiple: true,
-            description: 'The Arrow file to read'
-        },
-        {
-            type: String,
-            name: 'sep', optional: true, default: ' | ',
-            description: 'The column separator character (default: " | ")'
-        },
-        {
-            type: String,
-            name: 'hr', optional: true, default: '',
-            description: 'The horizontal border character (default: "")'
-        },
-        {
-            type: Boolean,
-            name: 'metadata', alias: 'm',
-            optional: true, default: false,
-            description: 'Flag to print Schema metadata (default: false)'
-        },
-        {
-            type: Boolean,
-            name: 'help', optional: true, default: false,
-            description: 'Print this usage guide.'
-        }
-    ];
-}
-
-function print_usage() {
-    console.log(require('command-line-usage')([
-        {
-            header: 'arrow2csv',
-            content: 'Print a CSV from an Arrow file'
-        },
-        {
-            header: 'Synopsis',
-            content: [
-                '$ arrow2csv {underline file.arrow} [{bold --schema} column_name ...]',
-                '$ arrow2csv [{bold --schema} column_name ...] [{bold --file} {underline file.arrow}]',
-                '$ arrow2csv {bold -s} column_1 {bold -s} column_2 [{bold -f} {underline file.arrow}]',
-                '$ arrow2csv [{bold --help}]'
-            ]
-        },
-        {
-            header: 'Options',
-            optionList: cliOpts()
-        },
-        {
-            header: 'Example',
-            content: [
-                '$ arrow2csv --schema foo baz --sep " , " -f simple.arrow',
-                '>   "row_id", "foo: Int32", "baz: Utf8"',
-                '>          0,            1,        "aa"',
-                '>          1,         null,        null',
-                '>          2,            3,        null',
-                '>          3,            4,       "bbb"',
-                '>          4,            5,      "cccc"',
-            ]
-        }
-    ]));
-    return 1;
-}
diff --git a/js/src/builder.ts b/js/src/builder.ts
deleted file mode 100644
index 86db953..0000000
--- a/js/src/builder.ts
+++ /dev/null
@@ -1,527 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Vector } from './vector';
-import { BufferType } from './enum';
-import { Data, Buffers } from './data';
-import { createIsValidFunction } from './builder/valid';
-import { BuilderType as B, VectorType as V} from './interfaces';
-import { BufferBuilder, BitmapBufferBuilder, DataBufferBuilder, OffsetsBufferBuilder } from './builder/buffer';
-import {
-    DataType, strideForType,
-    Float, Int, Decimal, FixedSizeBinary,
-    Date_, Time, Timestamp, Interval,
-    Utf8, Binary, List, Map_
-} from './type';
-
-/**
- * A set of options required to create a `Builder` instance for a given `DataType`.
- * @see {@link Builder}
- */
-export interface BuilderOptions<T extends DataType = any, TNull = any> {
-    type: T;
-    nullValues?: TNull[] | ReadonlyArray<TNull> | null;
-    children?: { [key: string]: BuilderOptions } | BuilderOptions[];
-}
-
-/**
- * A set of options to create an Iterable or AsyncIterable `Builder` transform function.
- * @see {@link Builder.throughIterable}
- * @see {@link Builder.throughAsyncIterable}
- */
-export interface IterableBuilderOptions<T extends DataType = any, TNull = any> extends BuilderOptions<T, TNull> {
-    highWaterMark?: number;
-    queueingStrategy?: 'bytes' | 'count';
-    dictionaryHashFunction?: (value: any) => string | number;
-    valueToChildTypeId?: (builder: Builder<T, TNull>, value: any, offset: number) => number;
-}
-
-/**
- * An abstract base class for types that construct Arrow Vectors from arbitrary JavaScript values.
- *
- * A `Builder` is responsible for writing arbitrary JavaScript values
- * to ArrayBuffers and/or child Builders according to the Arrow specification
- * for each DataType, creating or resizing the underlying ArrayBuffers as necessary.
- *
- * The `Builder` for each Arrow `DataType` handles converting and appending
- * values for a given `DataType`. The high-level {@link Builder.new `Builder.new()`} convenience
- * method creates the specific `Builder` subclass for the supplied `DataType`.
- *
- * Once created, `Builder` instances support both appending values to the end
- * of the `Builder`, and random-access writes to specific indices
- * (`Builder.prototype.append(value)` is a convenience method for
- * `builder.set(builder.length, value)`). Appending or setting values beyond the
- * Builder's current length may cause the builder to grow its underlying buffers
- * or child Builders (if applicable) to accommodate the new values.
- *
- * After enough values have been written to a `Builder`, `Builder.prototype.flush()`
- * will commit the values to the underlying ArrayBuffers (or child Builders). The
- * internal Builder state will be reset, and an instance of `Data<T>` is returned.
- * Alternatively, `Builder.prototype.toVector()` will flush the `Builder` and return
- * an instance of `Vector<T>` instead.
- *
- * When there are no more values to write, use `Builder.prototype.finish()` to
- * finalize the `Builder`. This does not reset the internal state, so it is
- * necessary to call `Builder.prototype.flush()` or `toVector()` one last time
- * if there are still values queued to be flushed.
- *
- * Note: calling `Builder.prototype.finish()` is required when using a `DictionaryBuilder`,
- * because this is when it flushes the values that have been enqueued in its internal
- * dictionary's `Builder`, and creates the `dictionaryVector` for the `Dictionary` `DataType`.
- *
- * ```ts
- * import { Builder, Utf8 } from 'apache-arrow';
- *
- * const utf8Builder = Builder.new({
- *     type: new Utf8(),
- *     nullValues: [null, 'n/a']
- * });
- *
- * utf8Builder
- *     .append('hello')
- *     .append('n/a')
- *     .append('world')
- *     .append(null);
- *
- * const utf8Vector = utf8Builder.finish().toVector();
- *
- * console.log(utf8Vector.toJSON());
- * // > ["hello", null, "world", null]
- * ```
- *
- * @typeparam T The `DataType` of this `Builder`.
- * @typeparam TNull The type(s) of values which will be considered null-value sentinels.
- */
-export abstract class Builder<T extends DataType = any, TNull = any> {
-
-    /**
-     * Create a `Builder` instance based on the `type` property of the supplied `options` object.
-     * @param {BuilderOptions<T, TNull>} options An object with a required `DataType` instance
-     * and other optional parameters to be passed to the `Builder` subclass for the given `type`.
-     *
-     * @typeparam T The `DataType` of the `Builder` to create.
-     * @typeparam TNull The type(s) of values which will be considered null-value sentinels.
-     * @nocollapse
-     */
-    // @ts-ignore
-    public static new<T extends DataType = any, TNull = any>(options: BuilderOptions<T, TNull>): B<T, TNull> {}
-
-    /** @nocollapse */
-    // @ts-ignore
-    public static throughNode<T extends DataType = any, TNull = any>(options: import('./io/node/builder').BuilderDuplexOptions<T, TNull>): import('stream').Duplex {
-        throw new Error(`"throughNode" not available in this environment`);
-    }
-    /** @nocollapse */
-    // @ts-ignore
-    public static throughDOM<T extends DataType = any, TNull = any>(options: import('./io/whatwg/builder').BuilderTransformOptions<T, TNull>): import('./io/whatwg/builder').BuilderTransform<T, TNull> {
-        throw new Error(`"throughDOM" not available in this environment`);
-    }
-
-    /**
-     * Transform a synchronous `Iterable` of arbitrary JavaScript values into a
-     * sequence of Arrow Vector<T> following the chunking semantics defined in
-     * the supplied `options` argument.
-     *
-     * This function returns a function that accepts an `Iterable` of values to
-     * transform. When called, this function returns an Iterator of `Vector<T>`.
-     *
-     * The resulting `Iterator<Vector<T>>` yields Vectors based on the
-     * `queueingStrategy` and `highWaterMark` specified in the `options` argument.
-     *
-     * * If `queueingStrategy` is `"count"` (or omitted), The `Iterator<Vector<T>>`
-     *   will flush the underlying `Builder` (and yield a new `Vector<T>`) once the
-     *   Builder's `length` reaches or exceeds the supplied `highWaterMark`.
-     * * If `queueingStrategy` is `"bytes"`, the `Iterator<Vector<T>>` will flush
-     *   the underlying `Builder` (and yield a new `Vector<T>`) once its `byteLength`
-     *   reaches or exceeds the supplied `highWaterMark`.
-     *
-     * @param {IterableBuilderOptions<T, TNull>} options An object of properties which determine the `Builder` to create and the chunking semantics to use.
-     * @returns A function which accepts a JavaScript `Iterable` of values to
-     *          write, and returns an `Iterator` that yields Vectors according
-     *          to the chunking semantics defined in the `options` argument.
-     * @nocollapse
-     */
-    public static throughIterable<T extends DataType = any, TNull = any>(options: IterableBuilderOptions<T, TNull>) {
-        return throughIterable(options);
-    }
-
-    /**
-     * Transform an `AsyncIterable` of arbitrary JavaScript values into a
-     * sequence of Arrow Vector<T> following the chunking semantics defined in
-     * the supplied `options` argument.
-     *
-     * This function returns a function that accepts an `AsyncIterable` of values to
-     * transform. When called, this function returns an AsyncIterator of `Vector<T>`.
-     *
-     * The resulting `AsyncIterator<Vector<T>>` yields Vectors based on the
-     * `queueingStrategy` and `highWaterMark` specified in the `options` argument.
-     *
-     * * If `queueingStrategy` is `"count"` (or omitted), The `AsyncIterator<Vector<T>>`
-     *   will flush the underlying `Builder` (and yield a new `Vector<T>`) once the
-     *   Builder's `length` reaches or exceeds the supplied `highWaterMark`.
-     * * If `queueingStrategy` is `"bytes"`, the `AsyncIterator<Vector<T>>` will flush
-     *   the underlying `Builder` (and yield a new `Vector<T>`) once its `byteLength`
-     *   reaches or exceeds the supplied `highWaterMark`.
-     *
-     * @param {IterableBuilderOptions<T, TNull>} options An object of properties which determine the `Builder` to create and the chunking semantics to use.
-     * @returns A function which accepts a JavaScript `AsyncIterable` of values
-     *          to write, and returns an `AsyncIterator` that yields Vectors
-     *          according to the chunking semantics defined in the `options`
-     *          argument.
-     * @nocollapse
-     */
-    public static throughAsyncIterable<T extends DataType = any, TNull = any>(options: IterableBuilderOptions<T, TNull>) {
-        return throughAsyncIterable(options);
-    }
-
-    /**
-     * Construct a builder with the given Arrow DataType with optional null values,
-     * which will be interpreted as "null" when set or appended to the `Builder`.
-     * @param {{ type: T, nullValues?: any[] }} options A `BuilderOptions` object used to create this `Builder`.
-     */
-    constructor({ 'type': type, 'nullValues': nulls }: BuilderOptions<T, TNull>) {
-        this.type = type;
-        this.children = [];
-        this.nullValues = nulls;
-        this.stride = strideForType(type);
-        this._nulls = new BitmapBufferBuilder();
-        if (nulls && nulls.length > 0) {
-            this._isValid = createIsValidFunction(nulls);
-        }
-    }
-
-    /**
-     * The Builder's `DataType` instance.
-     * @readonly
-     */
-    public type: T;
-    /**
-     * The number of values written to the `Builder` that haven't been flushed yet.
-     * @readonly
-     */
-    public length = 0;
-    /**
-     * A boolean indicating whether `Builder.prototype.finish()` has been called on this `Builder`.
-     * @readonly
-     */
-    public finished = false;
-    /**
-     * The number of elements in the underlying values TypedArray that
-     * represent a single logical element, determined by this Builder's
-     * `DataType`. This is 1 for most types, but is larger when the `DataType`
-     * is `Int64`, `Uint64`, `Decimal`, `DateMillisecond`, certain variants of
-     * `Interval`, `Time`, or `Timestamp`, `FixedSizeBinary`, and `FixedSizeList`.
-     * @readonly
-     */
-    public readonly stride: number;
-    public readonly children: Builder[];
-    /**
-     * The list of null-value sentinels for this `Builder`. When one of these values
-     * is written to the `Builder` (either via `Builder.prototype.set()` or `Builder.prototype.append()`),
-     * a 1-bit is written to this Builder's underlying null BitmapBufferBuilder.
-     * @readonly
-     */
-    public readonly nullValues?: TNull[] | ReadonlyArray<TNull> | null;
-
-    /**
-     * Flush the `Builder` and return a `Vector<T>`.
-     * @returns {Vector<T>} A `Vector<T>` of the flushed values.
-     */
-    public toVector() { return Vector.new(this.flush()); }
-
-    public get ArrayType() { return this.type.ArrayType; }
-    public get nullCount() { return this._nulls.numInvalid; }
-    public get numChildren() { return this.children.length; }
-
-    /**
-     * @returns The aggregate length (in bytes) of the values that have been written.
-     */
-    public get byteLength(): number {
-        let size = 0;
-        this._offsets && (size += this._offsets.byteLength);
-        this._values && (size += this._values.byteLength);
-        this._nulls && (size += this._nulls.byteLength);
-        this._typeIds && (size += this._typeIds.byteLength);
-        return this.children.reduce((size, child) => size + child.byteLength, size);
-    }
-
-    /**
-     * @returns The aggregate number of rows that have been reserved to write new values.
-     */
-    public get reservedLength(): number {
-        return this._nulls.reservedLength;
-    }
-
-    /**
-     * @returns The aggregate length (in bytes) that has been reserved to write new values.
-     */
-    public get reservedByteLength(): number {
-        let size = 0;
-        this._offsets && (size += this._offsets.reservedByteLength);
-        this._values && (size += this._values.reservedByteLength);
-        this._nulls && (size += this._nulls.reservedByteLength);
-        this._typeIds && (size += this._typeIds.reservedByteLength);
-        return this.children.reduce((size, child) => size + child.reservedByteLength, size);
-    }
-
-    protected _offsets!: DataBufferBuilder<Int32Array>;
-    public get valueOffsets() { return this._offsets ? this._offsets.buffer : null; }
-
-    protected _values!: BufferBuilder<T['TArray'], any>;
-    public get values() { return this._values ? this._values.buffer : null; }
-
-    protected _nulls: BitmapBufferBuilder;
-    public get nullBitmap() { return this._nulls ? this._nulls.buffer : null; }
-
-    protected _typeIds!: DataBufferBuilder<Int8Array>;
-    public get typeIds() { return this._typeIds ? this._typeIds.buffer : null; }
-
-    protected _isValid!: (value: T['TValue'] | TNull) => boolean;
-    protected _setValue!: (inst: Builder<T>, index: number, value: T['TValue']) => void;
-
-    /**
-     * Appends a value (or null) to this `Builder`.
-     * This is equivalent to `builder.set(builder.length, value)`.
-     * @param {T['TValue'] | TNull } value The value to append.
-     */
-    public append(value: T['TValue'] | TNull) { return this.set(this.length, value); }
-
-    /**
-     * Validates whether a value is valid (true), or null (false)
-     * @param {T['TValue'] | TNull } value The value to compare against null the value representations
-     */
-    public isValid(value: T['TValue'] | TNull): boolean { return this._isValid(value); }
-
-    /**
-     * Write a value (or null-value sentinel) at the supplied index.
-     * If the value matches one of the null-value representations, a 1-bit is
-     * written to the null `BitmapBufferBuilder`. Otherwise, a 0 is written to
-     * the null `BitmapBufferBuilder`, and the value is passed to
-     * `Builder.prototype.setValue()`.
-     * @param {number} index The index of the value to write.
-     * @param {T['TValue'] | TNull } value The value to write at the supplied index.
-     * @returns {this} The updated `Builder` instance.
-     */
-    public set(index: number, value: T['TValue'] | TNull) {
-        if (this.setValid(index, this.isValid(value))) {
-            this.setValue(index, value);
-        }
-        return this;
-    }
-
-    /**
-     * Write a value to the underlying buffers at the supplied index, bypassing
-     * the null-value check. This is a low-level method; prefer
-     * `Builder.prototype.set()`, which records validity before delegating here.
-     * @param {number} index The index at which to write the value.
-     * @param {T['TValue']} value The value to write.
-     */
-    public setValue(index: number, value: T['TValue']) { this._setValue(this, index, value); }
-    public setValid(index: number, valid: boolean) {
-        this.length = this._nulls.set(index, +valid).length;
-        return valid;
-    }
-
-    // @ts-ignore
-    public addChild(child: Builder, name = `${this.numChildren}`) {
-        throw new Error(`Cannot append children to non-nested type "${this.type}"`);
-    }
-
-    /**
-     * Retrieve the child `Builder` at the supplied `index`, or null if no child
-     * exists at that index.
-     * @param {number} index The index of the child `Builder` to retrieve.
-     * @returns {Builder | null} The child Builder at the supplied index or null.
-     */
-    public getChildAt<R extends DataType = any>(index: number): Builder<R> | null {
-        return this.children[index] || null;
-    }
-
-    /**
-     * Commit all the values that have been written to their underlying
-     * ArrayBuffers, including any child Builders if applicable, and reset
-     * the internal `Builder` state.
-     * @returns A `Data<T>` of the buffers and childData representing the values written.
-     */
-    public flush() {
-
-        const buffers: any = [];
-        const values =  this._values;
-        const offsets =  this._offsets;
-        const typeIds =  this._typeIds;
-        const { length, nullCount } = this;
-
-        if (typeIds) { /* Unions */
-            buffers[BufferType.TYPE] = typeIds.flush(length);
-            // DenseUnions
-            offsets && (buffers[BufferType.OFFSET] = offsets.flush(length));
-        } else if (offsets) { /* Variable-width primitives (Binary, Utf8) and Lists */
-            // Binary, Utf8
-            values && (buffers[BufferType.DATA] = values.flush(offsets.last()));
-            buffers[BufferType.OFFSET] = offsets.flush(length);
-        } else if (values) { /* Fixed-width primitives (Int, Float, Decimal, Time, Timestamp, and Interval) */
-            buffers[BufferType.DATA] = values.flush(length);
-        }
-
-        nullCount > 0 && (buffers[BufferType.VALIDITY] = this._nulls.flush(length));
-
-        const data = Data.new<T>(
-            this.type, 0, length, nullCount, buffers as Buffers<T>,
-            this.children.map((child) => child.flush())) as Data<T>;
-
-        this.clear();
-
-        return data;
-    }
-
-    /**
-     * Finalize this `Builder`, and child builders if applicable.
-     * @returns {this} The finalized `Builder` instance.
-     */
-    public finish() {
-        this.finished = true;
-        this.children.forEach((child) => child.finish());
-        return this;
-    }
-
-    /**
-     * Clear this Builder's internal state, including child Builders if applicable, and reset the length to 0.
-     * @returns {this} The cleared `Builder` instance.
-     */
-    public clear() {
-        this.length = 0;
-        this._offsets && (this._offsets.clear());
-        this._values && (this._values.clear());
-        this._nulls && (this._nulls.clear());
-        this._typeIds && (this._typeIds.clear());
-        this.children.forEach((child) => child.clear());
-        return this;
-    }
-}
-
-(Builder.prototype as any).length = 1;
-(Builder.prototype as any).stride = 1;
-(Builder.prototype as any).children = null;
-(Builder.prototype as any).finished = false;
-(Builder.prototype as any).nullValues = null;
-(Builder.prototype as any)._isValid = () => true;
-
-/** @ignore */
-export abstract class FixedWidthBuilder<T extends Int | Float | FixedSizeBinary | Date_ | Timestamp | Time | Decimal | Interval = any, TNull = any> extends Builder<T, TNull> {
-    constructor(opts: BuilderOptions<T, TNull>) {
-        super(opts);
-        this._values = new DataBufferBuilder(new this.ArrayType(0), this.stride);
-    }
-    public setValue(index: number, value: T['TValue']) {
-        const values = this._values;
-        values.reserve(index - values.length + 1);
-        return super.setValue(index, value);
-    }
-}
-
-/** @ignore */
-export abstract class VariableWidthBuilder<T extends Binary | Utf8 | List | Map_, TNull = any> extends Builder<T, TNull> {
-    protected _pendingLength = 0;
-    protected _offsets: OffsetsBufferBuilder;
-    protected _pending: Map<number, any> | undefined;
-    constructor(opts: BuilderOptions<T, TNull>) {
-        super(opts);
-        this._offsets = new OffsetsBufferBuilder();
-    }
-    public setValue(index: number, value: T['TValue']) {
-        const pending = this._pending || (this._pending = new Map());
-        const current = pending.get(index);
-        current && (this._pendingLength -= current.length);
-        this._pendingLength += value.length;
-        pending.set(index, value);
-    }
-    public setValid(index: number, isValid: boolean) {
-        if (!super.setValid(index, isValid)) {
-            (this._pending || (this._pending = new Map())).set(index, undefined);
-            return false;
-        }
-        return true;
-    }
-    public clear() {
-        this._pendingLength = 0;
-        this._pending = undefined;
-        return super.clear();
-    }
-    public flush() {
-        this._flush();
-        return super.flush();
-    }
-    public finish() {
-        this._flush();
-        return super.finish();
-    }
-    protected _flush() {
-        const pending = this._pending;
-        const pendingLength = this._pendingLength;
-        this._pendingLength = 0;
-        this._pending = undefined;
-        if (pending && pending.size > 0) {
-            this._flushPending(pending, pendingLength);
-        }
-        return this;
-    }
-    protected abstract _flushPending(pending: Map<number, any>, pendingLength: number): void;
-}
-
-/** @ignore */
-type ThroughIterable<T extends DataType = any, TNull = any> = (source: Iterable<T['TValue'] | TNull>) => IterableIterator<V<T>>;
-
-/** @ignore */
-function throughIterable<T extends DataType = any, TNull = any>(options: IterableBuilderOptions<T, TNull>) {
-    const { ['queueingStrategy']: queueingStrategy = 'count' } = options;
-    const { ['highWaterMark']: highWaterMark = queueingStrategy !== 'bytes' ? 1000 : 2 ** 14 } = options;
-    const sizeProperty: 'length' | 'byteLength' = queueingStrategy !== 'bytes' ? 'length' : 'byteLength';
-    return function*(source: Iterable<T['TValue'] | TNull>) {
-        let numChunks = 0;
-        const builder = Builder.new(options);
-        for (const value of source) {
-            if (builder.append(value)[sizeProperty] >= highWaterMark) {
-                ++numChunks && (yield builder.toVector());
-            }
-        }
-        if (builder.finish().length > 0 || numChunks === 0) {
-            yield builder.toVector();
-        }
-    } as ThroughIterable<T, TNull>;
-}
-
-/** @ignore */
-type ThroughAsyncIterable<T extends DataType = any, TNull = any> = (source: Iterable<T['TValue'] | TNull> | AsyncIterable<T['TValue'] | TNull>) => AsyncIterableIterator<V<T>>;
-
-/** @ignore */
-function throughAsyncIterable<T extends DataType = any, TNull = any>(options: IterableBuilderOptions<T, TNull>) {
-    const { ['queueingStrategy']: queueingStrategy = 'count' } = options;
-    const { ['highWaterMark']: highWaterMark = queueingStrategy !== 'bytes' ? 1000 : 2 ** 14 } = options;
-    const sizeProperty: 'length' | 'byteLength' = queueingStrategy !== 'bytes' ? 'length' : 'byteLength';
-    return async function* (source: Iterable<T['TValue'] | TNull> | AsyncIterable<T['TValue'] | TNull>) {
-        let numChunks = 0;
-        const builder = Builder.new(options);
-        for await (const value of source) {
-            if (builder.append(value)[sizeProperty] >= highWaterMark) {
-                ++numChunks && (yield builder.toVector());
-            }
-        }
-        if (builder.finish().length > 0 || numChunks === 0) {
-            yield builder.toVector();
-        }
-    } as ThroughAsyncIterable<T, TNull>;
-}
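
A minimal sketch of the Builder.throughIterable chunking described above
(Int32 and highWaterMark are the documented options; the output shapes follow
the default 'count' queueing strategy):

    import { Builder, Int32 } from 'apache-arrow';

    const chunk = Builder.throughIterable({ type: new Int32(), highWaterMark: 3 });
    for (const vector of chunk([1, 2, 3, 4, 5])) {
        // flushes once length >= 3, then once more on finish: [1,2,3] and [4,5]
        console.log(vector.toArray());
    }
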
diff --git a/js/src/builder/binary.ts b/js/src/builder/binary.ts
deleted file mode 100644
index 829da5c..0000000
--- a/js/src/builder/binary.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Binary } from '../type';
-import { toUint8Array } from '../util/buffer';
-import { BufferBuilder } from './buffer';
-import { VariableWidthBuilder, BuilderOptions } from '../builder';
-
-/** @ignore */
-export class BinaryBuilder<TNull = any> extends VariableWidthBuilder<Binary, TNull> {
-    constructor(opts: BuilderOptions<Binary, TNull>) {
-        super(opts);
-        this._values = new BufferBuilder(new Uint8Array(0));
-    }
-    public get byteLength(): number {
-        let size = this._pendingLength + (this.length * 4);
-        this._offsets && (size += this._offsets.byteLength);
-        this._values && (size += this._values.byteLength);
-        this._nulls && (size += this._nulls.byteLength);
-        return size;
-    }
-    public setValue(index: number, value: Uint8Array) {
-        return super.setValue(index, toUint8Array(value));
-    }
-    protected _flushPending(pending: Map<number, Uint8Array | undefined>, pendingLength: number) {
-        const offsets = this._offsets;
-        const data = this._values.reserve(pendingLength).buffer;
-        let index = 0, length = 0, offset = 0, value: Uint8Array | undefined;
-        for ([index, value] of pending) {
-            if (value === undefined) {
-                offsets.set(index, 0);
-            } else {
-                length = value.length;
-                data.set(value, offset);
-                offsets.set(index, length);
-                offset += length;
-            }
-        }
-    }
-}
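
A sketch of driving this builder through the generic Builder.new entry point
(Binary and the append/finish/toVector cycle are as documented in builder.ts;
values are copied into the data buffer on flush):

    import { Builder, Binary } from 'apache-arrow';

    const b = Builder.new({ type: new Binary(), nullValues: [null] });
    b.append(new Uint8Array([1, 2]));
    b.append(null);                        // recorded in the validity bitmap
    const vec = b.finish().toVector();
    console.log(vec.get(0), vec.get(1));   // Uint8Array [1, 2], null
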
diff --git a/js/src/builder/bool.ts b/js/src/builder/bool.ts
deleted file mode 100644
index 5c0e095..0000000
--- a/js/src/builder/bool.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Bool } from '../type';
-import { BitmapBufferBuilder } from './buffer';
-import { Builder, BuilderOptions } from '../builder';
-
-/** @ignore */
-export class BoolBuilder<TNull = any> extends Builder<Bool, TNull> {
-    constructor(options: BuilderOptions<Bool, TNull>) {
-        super(options);
-        this._values = new BitmapBufferBuilder();
-    }
-    public setValue(index: number, value: boolean) {
-        this._values.set(index, +value);
-    }
-}
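
And the analogous sketch for this builder, whose values land in a packed
bitmap rather than a data buffer:

    import { Builder, Bool } from 'apache-arrow';

    const b = Builder.new({ type: new Bool() });
    [true, false, true].forEach((v) => b.append(v));
    console.log([...b.finish().toVector()]);   // [ true, false, true ]
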
diff --git a/js/src/builder/buffer.ts b/js/src/builder/buffer.ts
deleted file mode 100644
index 3c20cc0..0000000
--- a/js/src/builder/buffer.ts
+++ /dev/null
@@ -1,182 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { memcpy } from '../util/buffer';
-import { BigIntAvailable, BigInt64Array, BigUint64Array } from '../util/compat';
-import {
-    TypedArray, TypedArrayConstructor,
-    BigIntArray, BigIntArrayConstructor
-} from '../interfaces';
-
-/** @ignore */ type DataValue<T> = T extends TypedArray ? number : T extends BigIntArray ? WideValue<T> : T;
-/** @ignore */ type WideValue<T extends BigIntArray> = T extends BigIntArray ? bigint | Int32Array | Uint32Array : never;
-/** @ignore */ type ArrayCtor<T extends TypedArray | BigIntArray> =
-    T extends TypedArray  ? TypedArrayConstructor<T>  :
-    T extends BigIntArray ? BigIntArrayConstructor<T> :
-    any;
-
-/** @ignore */
-const roundLengthUpToNearest64Bytes = (len: number, BPE: number) => ((((len * BPE) + 63) & ~63) || 64) / BPE;
-/** @ignore */
-const sliceOrExtendArray = <T extends TypedArray | BigIntArray>(arr: T, len = 0) => (
-    arr.length >= len ? arr.subarray(0, len) : memcpy(new (arr.constructor as any)(len), arr, 0)
-) as T;
-
-/** @ignore */
-export interface BufferBuilder<T extends TypedArray | BigIntArray = any, TValue = DataValue<T>> {
-    readonly offset: number;
-}
-
-/** @ignore */
-export class BufferBuilder<T extends TypedArray | BigIntArray = any, TValue = DataValue<T>> {
-
-    constructor(buffer: T, stride = 1) {
-        this.buffer = buffer;
-        this.stride = stride;
-        this.BYTES_PER_ELEMENT = buffer.BYTES_PER_ELEMENT;
-        this.ArrayType = buffer.constructor as ArrayCtor<T>;
-        this._resize(this.length = buffer.length / stride | 0);
-    }
-
-    public buffer: T;
-    public length: number;
-    public readonly stride: number;
-    public readonly ArrayType: ArrayCtor<T>;
-    public readonly BYTES_PER_ELEMENT: number;
-
-    public get byteLength() { return this.length * this.stride * this.BYTES_PER_ELEMENT | 0; }
-    public get reservedLength() { return this.buffer.length / this.stride; }
-    public get reservedByteLength() { return this.buffer.byteLength; }
-
-    // @ts-ignore
-    public set(index: number, value: TValue) { return this; }
-    public append(value: TValue) { return this.set(this.length, value); }
-    public reserve(extra: number) {
-        if (extra > 0) {
-            this.length += extra;
-            const stride = this.stride;
-            const length = this.length * stride;
-            const reserved = this.buffer.length;
-            if (length >= reserved) {
-                this._resize(reserved === 0
-                    ? roundLengthUpToNearest64Bytes(length * 1, this.BYTES_PER_ELEMENT)
-                    : roundLengthUpToNearest64Bytes(length * 2, this.BYTES_PER_ELEMENT)
-                );
-            }
-        }
-        return this;
-    }
-    public flush(length = this.length) {
-        length = roundLengthUpToNearest64Bytes(length * this.stride, this.BYTES_PER_ELEMENT);
-        const array = sliceOrExtendArray<T>(this.buffer, length);
-        this.clear();
-        return array;
-    }
-    public clear() {
-        this.length = 0;
-        this._resize(0);
-        return this;
-    }
-    protected _resize(newLength: number) {
-        return this.buffer = <T> memcpy(new this.ArrayType(newLength), this.buffer);
-    }
-}
-
-(BufferBuilder.prototype as any).offset = 0;
-
-/** @ignore */
-export class DataBufferBuilder<T extends TypedArray> extends BufferBuilder<T, number> {
-    public last() { return this.get(this.length - 1); }
-    public get(index: number) { return this.buffer[index]; }
-    public set(index: number, value: number) {
-        this.reserve(index - this.length + 1);
-        this.buffer[index * this.stride] = value;
-        return this;
-    }
-}
-
-/** @ignore */
-export class BitmapBufferBuilder extends DataBufferBuilder<Uint8Array> {
-
-    constructor(data = new Uint8Array(0)) { super(data, 1 / 8); }
-
-    public numValid = 0;
-    public get numInvalid() { return this.length - this.numValid; }
-    public get(idx: number) { return this.buffer[idx >> 3] >> idx % 8 & 1; }
-    public set(idx: number, val: number) {
-        const { buffer } = this.reserve(idx - this.length + 1);
-        const byte = idx >> 3, bit = idx % 8, cur = buffer[byte] >> bit & 1;
-        // If `val` is truthy and the current bit is 0, flip it to 1 and increment `numValid`.
-        // If `val` is falsy and the current bit is 1, flip it to 0 and decrement `numValid`.
-        val ? cur === 0 && ((buffer[byte] |=  (1 << bit)), ++this.numValid)
-            : cur === 1 && ((buffer[byte] &= ~(1 << bit)), --this.numValid);
-        return this;
-    }
-    public clear() {
-        this.numValid = 0;
-        return super.clear();
-    }
-}
-
-/** @ignore */
-export class OffsetsBufferBuilder extends DataBufferBuilder<Int32Array> {
-    constructor(data = new Int32Array(1)) { super(data, 1); }
-    public append(value: number) {
-        return this.set(this.length - 1, value);
-    }
-    public set(index: number, value: number) {
-        const offset = this.length - 1;
-        const buffer = this.reserve(index - offset + 1).buffer;
-        if (offset < index++) {
-            buffer.fill(buffer[offset], offset, index);
-        }
-        buffer[index] = buffer[index - 1] + value;
-        return this;
-    }
-    public flush(length = this.length - 1) {
-        if (length > this.length) {
-            this.set(length - 1, 0);
-        }
-        return super.flush(length + 1);
-    }
-}
-
-/** @ignore */
-export class WideBufferBuilder<T extends TypedArray, R extends BigIntArray> extends BufferBuilder<T, DataValue<T>> {
-    public buffer64!: R;
-    protected _ArrayType64!: BigIntArrayConstructor<R>;
-    public get ArrayType64() {
-        return this._ArrayType64 || (this._ArrayType64 = <BigIntArrayConstructor<R>> (this.buffer instanceof Int32Array ? BigInt64Array : BigUint64Array));
-    }
-    public set(index: number, value: DataValue<T>) {
-        this.reserve(index - this.length + 1);
-        switch (typeof value) {
-            case 'bigint': this.buffer64[index] = value; break;
-            case 'number': this.buffer[index * this.stride] = value; break;
-            default: this.buffer.set(value as TypedArray, index * this.stride);
-        }
-        return this;
-    }
-    protected _resize(newLength: number) {
-        const data = super._resize(newLength);
-        const length = data.byteLength / (this.BYTES_PER_ELEMENT * this.stride);
-        if (BigIntAvailable) {
-            this.buffer64 = new this.ArrayType64(data.buffer, data.byteOffset, length);
-        }
-        return data;
-    }
-}
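
The roundLengthUpToNearest64Bytes helper above keeps every reallocation padded
to Arrow's 64-byte buffer alignment, with a 64-byte floor for empty buffers.
Worked through by hand (the snippet only restates the bit arithmetic defined
above, so it is self-contained):

    const roundLengthUpToNearest64Bytes = (len: number, BPE: number) =>
        ((((len * BPE) + 63) & ~63) || 64) / BPE;

    // 5 x 4-byte elements  = 20 bytes -> padded to 64  -> capacity 16 elements
    console.log(roundLengthUpToNearest64Bytes(5, 4));   // 16
    // 17 x 4-byte elements = 68 bytes -> padded to 128 -> capacity 32 elements
    console.log(roundLengthUpToNearest64Bytes(17, 4));  // 32
    // 0 bytes hits the `|| 64` floor -> 64 bytes -> capacity 16 elements
    console.log(roundLengthUpToNearest64Bytes(0, 4));   // 16
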
diff --git a/js/src/builder/date.ts b/js/src/builder/date.ts
deleted file mode 100644
index e9748b5..0000000
--- a/js/src/builder/date.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { FixedWidthBuilder } from '../builder';
-import { Date_, DateDay, DateMillisecond } from '../type';
-
-/** @ignore */
-export class DateBuilder<T extends Date_ = Date_, TNull = any> extends FixedWidthBuilder<T, TNull> {}
-/** @ignore */
-export class DateDayBuilder<TNull = any> extends DateBuilder<DateDay, TNull> {}
-/** @ignore */
-export class DateMillisecondBuilder<TNull = any> extends DateBuilder<DateMillisecond, TNull> {}
diff --git a/js/src/builder/decimal.ts b/js/src/builder/decimal.ts
deleted file mode 100644
index 5814abd..0000000
--- a/js/src/builder/decimal.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Decimal } from '../type';
-import { FixedWidthBuilder } from '../builder';
-
-/** @ignore */
-export class DecimalBuilder<TNull = any> extends FixedWidthBuilder<Decimal, TNull> {}
diff --git a/js/src/builder/dictionary.ts b/js/src/builder/dictionary.ts
deleted file mode 100644
index 6602825..0000000
--- a/js/src/builder/dictionary.ts
+++ /dev/null
@@ -1,98 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Vector } from '../vector';
-import { IntBuilder } from './int';
-import { Dictionary, DataType } from '../type';
-import { Builder, BuilderOptions } from '../builder';
-
-type DictionaryHashFunction = (x: any) => string | number;
-
-export interface DictionaryBuilderOptions<T extends DataType = any, TNull = any> extends BuilderOptions<T, TNull> {
-    dictionaryHashFunction?: DictionaryHashFunction;
-}
-
-/** @ignore */
-export class DictionaryBuilder<T extends Dictionary, TNull = any> extends Builder<T, TNull> {
-
-    protected _dictionaryOffset: number;
-    protected _dictionary?: Vector<T['dictionary']>;
-    protected _keysToIndices: { [key: string]: number };
-    public readonly indices: IntBuilder<T['indices']>;
-    public readonly dictionary: Builder<T['dictionary']>;
-
-    constructor({ 'type': type, 'nullValues': nulls, 'dictionaryHashFunction': hashFn }: DictionaryBuilderOptions<T, TNull>) {
-        super({ type: new Dictionary(type.dictionary, type.indices, type.id, type.isOrdered) as T });
-        this._nulls = <any> null;
-        this._dictionaryOffset = 0;
-        this._keysToIndices = Object.create(null);
-        this.indices = Builder.new({ 'type': this.type.indices, 'nullValues': nulls }) as IntBuilder<T['indices']>;
-        this.dictionary = Builder.new({ 'type': this.type.dictionary, 'nullValues': null }) as Builder<T['dictionary']>;
-        if (typeof hashFn === 'function') {
-            this.valueToKey = hashFn;
-        }
-    }
-
-    public get values() { return this.indices.values; }
-    public get nullCount() { return this.indices.nullCount; }
-    public get nullBitmap() { return this.indices.nullBitmap; }
-    public get byteLength() { return this.indices.byteLength + this.dictionary.byteLength; }
-    public get reservedLength() { return this.indices.reservedLength + this.dictionary.reservedLength; }
-    public get reservedByteLength() { return this.indices.reservedByteLength + this.dictionary.reservedByteLength; }
-    public isValid(value: T['TValue'] | TNull) { return this.indices.isValid(value); }
-    public setValid(index: number, valid: boolean) {
-        const indices = this.indices;
-        valid = indices.setValid(index, valid);
-        this.length = indices.length;
-        return valid;
-    }
-    public setValue(index: number, value: T['TValue']) {
-        const keysToIndices = this._keysToIndices;
-        const key = this.valueToKey(value);
-        let idx = keysToIndices[key];
-        if (idx === undefined) {
-            keysToIndices[key] = idx = this._dictionaryOffset + this.dictionary.append(value).length - 1;
-        }
-        return this.indices.setValue(index, idx);
-    }
-    public flush() {
-        const type = this.type;
-        const prev = this._dictionary;
-        const curr = this.dictionary.toVector();
-        const data = this.indices.flush().clone(type);
-        data.dictionary = prev ? prev.concat(curr) : curr;
-        this.finished || (this._dictionaryOffset += curr.length);
-        this._dictionary = data.dictionary as Vector<T['dictionary']>;
-        this.clear();
-        return data;
-    }
-    public finish() {
-        this.indices.finish();
-        this.dictionary.finish();
-        this._dictionaryOffset = 0;
-        this._keysToIndices = Object.create(null);
-        return super.finish();
-    }
-    public clear() {
-        this.indices.clear();
-        this.dictionary.clear();
-        return super.clear();
-    }
-    public valueToKey(val: any): string | number {
-        return typeof val === 'string' ? val : `${val}`;
-    }
-}
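
The interesting mechanics above live in setValue(): every value is hashed
through valueToKey(), only unseen keys append to the inner dictionary builder,
and each call writes an index. A sketch, again assuming the pre-removal
'apache-arrow' exports (the Dictionary constructor signature follows the
super() call in the constructor above):

    import { Builder, Dictionary, Utf8, Int32 } from 'apache-arrow';

    const builder = Builder.new({
        type: new Dictionary(new Utf8(), new Int32()),
        nullValues: [null],
    });
    for (const v of ['a', 'b', 'a', 'a', 'c', null]) { builder.append(v); }
    const vector = builder.finish().toVector();
    // Six indices were written, but the dictionary holds only 'a', 'b', 'c'.
    console.log(vector.length);   // 6
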
diff --git a/js/src/builder/fixedsizebinary.ts b/js/src/builder/fixedsizebinary.ts
deleted file mode 100644
index 99aaf46..0000000
--- a/js/src/builder/fixedsizebinary.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { FixedSizeBinary } from '../type';
-import { FixedWidthBuilder } from '../builder';
-
-/** @ignore */
-export class FixedSizeBinaryBuilder<TNull = any> extends FixedWidthBuilder<FixedSizeBinary, TNull> {}
diff --git a/js/src/builder/fixedsizelist.ts b/js/src/builder/fixedsizelist.ts
deleted file mode 100644
index cc20f5b..0000000
--- a/js/src/builder/fixedsizelist.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Run } from './run';
-import { Field } from '../schema';
-import { Builder } from '../builder';
-import { DataType, FixedSizeList } from '../type';
-
-/** @ignore */
-export class FixedSizeListBuilder<T extends DataType = any, TNull = any> extends Builder<FixedSizeList<T>, TNull> {
-    protected _run = new Run<T, TNull>();
-    public setValue(index: number, value: T['TValue']) {
-        super.setValue(index, this._run.bind(value));
-    }
-    public addChild(child: Builder<T>, name = '0') {
-        if (this.numChildren > 0) {
-            throw new Error('FixedSizeListBuilder can only have one child.');
-        }
-        const childIndex = this.children.push(child);
-        this.type = new FixedSizeList(this.type.listSize, new Field(name, child.type, true));
-        return childIndex;
-    }
-    public clear() {
-        this._run.clear();
-        return super.clear();
-    }
-}
diff --git a/js/src/builder/float.ts b/js/src/builder/float.ts
deleted file mode 100644
index dbf4c0d..0000000
--- a/js/src/builder/float.ts
+++ /dev/null
@@ -1,45 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { float64ToUint16 } from '../util/math';
-import { FixedWidthBuilder } from '../builder';
-import { Float, Float16, Float32, Float64 } from '../type';
-
-/** @ignore */
-export class FloatBuilder<T extends Float = Float, TNull = any> extends FixedWidthBuilder<T, TNull> {}
-
-/** @ignore */
-export class Float16Builder<TNull = any> extends FloatBuilder<Float16, TNull> {
-    public setValue(index: number, value: number) {
-        // convert the JS float64 to an IEEE half-precision (binary16) bit pattern stored as a uint16
-        this._values.set(index, float64ToUint16(value));
-    }
-}
-
-/** @ignore */
-export class Float32Builder<TNull = any> extends FloatBuilder<Float32, TNull> {
-    public setValue(index: number, value: number) {
-        this._values.set(index, value);
-    }
-}
-
-/** @ignore */
-export class Float64Builder<TNull = any> extends FloatBuilder<Float64, TNull> {
-    public setValue(index: number, value: number) {
-        this._values.set(index, value);
-    }
-}
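
Float16Builder relies on float64ToUint16 from util/math (removed elsewhere in
this commit) to store IEEE binary16 bit patterns in a Uint16Array. That helper
is not shown in this hunk, so the sketch below is one common way to write the
conversion, not necessarily the removed implementation; it truncates the
mantissa and flushes subnormals to signed zero:

    const f32 = new Float32Array(1);
    const u32 = new Uint32Array(f32.buffer);

    function float64ToUint16(value: number): number {
        f32[0] = value;                               // narrow to float32 first
        const x = u32[0];
        const sign = (x >>> 16) & 0x8000;
        const exp = ((x >>> 23) & 0xff) - 127 + 15;   // rebias the exponent
        const frac = x & 0x7fffff;
        if (exp >= 0x1f) {                            // overflow, Inf, or NaN
            return sign | 0x7c00 | (Number.isNaN(value) ? 0x200 : 0);
        }
        if (exp <= 0) { return sign; }                // flush subnormals to +/-0
        return sign | (exp << 10) | (frac >>> 13);    // truncate the mantissa
    }

    console.log(float64ToUint16(1.0).toString(16));   // '3c00'
    console.log(float64ToUint16(-2.0).toString(16));  // 'c000'
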
diff --git a/js/src/builder/index.ts b/js/src/builder/index.ts
deleted file mode 100644
index dfd9d54..0000000
--- a/js/src/builder/index.ts
+++ /dev/null
@@ -1,82 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-/** @ignore */
-export { Builder, BuilderOptions } from '../builder';
-export { BoolBuilder } from './bool';
-export { NullBuilder } from './null';
-export { DateBuilder, DateDayBuilder, DateMillisecondBuilder } from './date';
-export { DecimalBuilder } from './decimal';
-export { DictionaryBuilder } from './dictionary';
-export { FixedSizeBinaryBuilder } from './fixedsizebinary';
-export { FloatBuilder, Float16Builder, Float32Builder, Float64Builder } from './float';
-export { IntBuilder, Int8Builder, Int16Builder, Int32Builder, Int64Builder, Uint8Builder, Uint16Builder, Uint32Builder, Uint64Builder } from './int';
-export { TimeBuilder, TimeSecondBuilder, TimeMillisecondBuilder, TimeMicrosecondBuilder, TimeNanosecondBuilder } from './time';
-export { TimestampBuilder, TimestampSecondBuilder, TimestampMillisecondBuilder, TimestampMicrosecondBuilder, TimestampNanosecondBuilder } from './timestamp';
-export { IntervalBuilder, IntervalDayTimeBuilder, IntervalYearMonthBuilder } from './interval';
-export { Utf8Builder } from './utf8';
-export { BinaryBuilder } from './binary';
-export { ListBuilder } from './list';
-export { FixedSizeListBuilder } from './fixedsizelist';
-export { MapBuilder } from './map';
-export { StructBuilder } from './struct';
-export { UnionBuilder, SparseUnionBuilder, DenseUnionBuilder } from './union';
-
-import { Type } from '../enum';
-import { Field } from '../schema';
-import { DataType } from '../type';
-import { Utf8Builder } from './utf8';
-import { BuilderType as B } from '../interfaces';
-import { Builder, BuilderOptions } from '../builder';
-import { instance as setVisitor } from '../visitor/set';
-import { instance as getBuilderConstructor } from '../visitor/builderctor';
-
-/** @nocollapse */
-Builder.new = newBuilder;
-
-function newBuilder<T extends DataType = any, TNull = any>(options: BuilderOptions<T, TNull>): B<T, TNull> {
-
-    const type = options.type;
-    const builder = new (getBuilderConstructor.getVisitFn<T>(type)())(options) as Builder<T, TNull>;
-
-    if (type.children && type.children.length > 0) {
-
-        const children = options['children'] || [] as BuilderOptions[];
-        const defaultOptions = { 'nullValues': options['nullValues'] };
-        const getChildOptions = Array.isArray(children)
-            ? ((_: Field, i: number) => children[i] || defaultOptions)
-            : (({ name }: Field) => children[name] || defaultOptions);
-
-        type.children.forEach((field, index) => {
-            const { type } = field;
-            const opts = getChildOptions(field, index);
-            builder.children.push(newBuilder({ ...opts, type }));
-        });
-    }
-
-    return builder as B<T, TNull>;
-}
-
-(Object.keys(Type) as any[])
-    .map((T: any) => Type[T] as any)
-    .filter((T: any): T is Type => typeof T === 'number' && T !== Type.NONE)
-    .forEach((typeId) => {
-        const BuilderCtor = getBuilderConstructor.visit(typeId);
-        BuilderCtor.prototype._setValue = setVisitor.getVisitFn(typeId);
-    });
-
-(Utf8Builder.prototype as any)._setValue = setVisitor.visitBinary;
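
The newBuilder factory above resolves per-child options either positionally
(when `children` is an array) or by field name (when it is an object). A
sketch of a nested builder built that way, with the type constructors assumed
from the pre-removal exports; the factory overwrites each child's `type` with
the field's type, so the one supplied below is only there to satisfy the
BuilderOptions typing:

    import { Builder, Struct, Field, Utf8, Int32 } from 'apache-arrow';

    const builder = Builder.new({
        type: new Struct([
            new Field('name', new Utf8(), true),
            new Field('age', new Int32(), true),
        ]),
        nullValues: [null],
        // keyed by field name here; a positional array works as well
        children: { name: { type: new Utf8(), nullValues: [null, 'n/a'] } },
    });
    builder.append({ name: 'alice', age: 30 });
    builder.append(null);
    console.log(builder.finish().toVector().length);   // 2
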
diff --git a/js/src/builder/int.ts b/js/src/builder/int.ts
deleted file mode 100644
index 5777bd1..0000000
--- a/js/src/builder/int.ts
+++ /dev/null
@@ -1,80 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { bignumToBigInt } from '../util/bn';
-import { WideBufferBuilder } from './buffer';
-import { BigInt64Array } from '../util/compat';
-import { FixedWidthBuilder, BuilderOptions } from '../builder';
-import { Int, Int8, Int16, Int32, Int64, Uint8, Uint16, Uint32, Uint64 } from '../type';
-
-/** @ignore */
-export class IntBuilder<T extends Int = Int, TNull = any> extends FixedWidthBuilder<T, TNull> {
-    public setValue(index: number, value: T['TValue']) {
-        this._values.set(index, value);
-    }
-}
-
-/** @ignore */
-export class Int8Builder<TNull = any> extends IntBuilder<Int8, TNull> {}
-/** @ignore */
-export class Int16Builder<TNull = any> extends IntBuilder<Int16, TNull> {}
-/** @ignore */
-export class Int32Builder<TNull = any> extends IntBuilder<Int32, TNull> {}
-/** @ignore */
-export class Int64Builder<TNull = any> extends IntBuilder<Int64, TNull> {
-    protected _values: WideBufferBuilder<Int32Array, BigInt64Array>;
-    constructor(options: BuilderOptions<Int64, TNull>) {
-        if (options['nullValues']) {
-            options['nullValues'] = (options['nullValues'] as TNull[]).map(toBigInt);
-        }
-        super(options);
-        this._values = new WideBufferBuilder(new Int32Array(0), 2);
-    }
-    public get values64() { return this._values.buffer64; }
-    public isValid(value: Int32Array | bigint | TNull) { return super.isValid(toBigInt(value)); }
-}
-
-/** @ignore */
-export class Uint8Builder<TNull = any> extends IntBuilder<Uint8, TNull> {}
-/** @ignore */
-export class Uint16Builder<TNull = any> extends IntBuilder<Uint16, TNull> {}
-/** @ignore */
-export class Uint32Builder<TNull = any> extends IntBuilder<Uint32, TNull> {}
-/** @ignore */
-export class Uint64Builder<TNull = any> extends IntBuilder<Uint64, TNull> {
-    protected _values: WideBufferBuilder<Uint32Array, BigUint64Array>;
-    constructor(options: BuilderOptions<Uint64, TNull>) {
-        if (options['nullValues']) {
-            options['nullValues'] = (options['nullValues'] as TNull[]).map(toBigInt);
-        }
-        super(options);
-        this._values = new WideBufferBuilder(new Uint32Array(0), 2);
-    }
-    public get values64() { return this._values.buffer64; }
-    public isValid(value: Uint32Array | bigint | TNull) { return super.isValid(toBigInt(value)); }
-}
-
-const toBigInt = ((memo: any) => (value: any) => {
-    if (ArrayBuffer.isView(value)) {
-        memo.buffer = value.buffer;
-        memo.byteOffset = value.byteOffset;
-        memo.byteLength = value.byteLength;
-        value = bignumToBigInt(memo);
-        memo.buffer = null;
-    }
-    return value;
-})({ 'BigIntArray': BigInt64Array });
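
Int64Builder stores values in an Int32Array with stride 2 (lo/hi pairs) and,
when BigInt is available, WideBufferBuilder mirrors the same bytes through a
BigInt64Array view; the toBigInt memo above normalizes Int32Array inputs so
null checks compare as bigints. A sketch under the same export assumptions:

    import { Builder, Int64 } from 'apache-arrow';

    const builder = Builder.new({ type: new Int64(), nullValues: [null] });
    builder.append(new Int32Array([123, 0]));   // written as a lo/hi int32 pair
    // bigints are handled at runtime too (WideBufferBuilder.set switches on
    // typeof value), though the TValue typing expects Int32Array:
    builder.append(BigInt(456) as any);
    builder.append(null);
    console.log(builder.finish().toVector().length);   // 3
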
diff --git a/js/src/builder/interval.ts b/js/src/builder/interval.ts
deleted file mode 100644
index 3742282..0000000
--- a/js/src/builder/interval.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { FixedWidthBuilder } from '../builder';
-import { Interval, IntervalDayTime, IntervalYearMonth } from '../type';
-
-/** @ignore */
-export class IntervalBuilder<T extends Interval = Interval, TNull = any> extends FixedWidthBuilder<T, TNull> {}
-/** @ignore */
-export class IntervalDayTimeBuilder<TNull = any> extends IntervalBuilder<IntervalDayTime, TNull> {}
-/** @ignore */
-export class IntervalYearMonthBuilder<TNull = any> extends IntervalBuilder<IntervalYearMonth, TNull> {}
diff --git a/js/src/builder/list.ts b/js/src/builder/list.ts
deleted file mode 100644
index 844681e..0000000
--- a/js/src/builder/list.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Run } from './run';
-import { Field } from '../schema';
-import { DataType, List } from '../type';
-import { OffsetsBufferBuilder } from './buffer';
-import { Builder, BuilderOptions, VariableWidthBuilder } from '../builder';
-
-/** @ignore */
-export class ListBuilder<T extends DataType = any, TNull = any> extends VariableWidthBuilder<List<T>, TNull> {
-    protected _run = new Run<T, TNull>();
-    protected _offsets: OffsetsBufferBuilder;
-    constructor(opts: BuilderOptions<List<T>, TNull>) {
-        super(opts);
-        this._offsets = new OffsetsBufferBuilder();
-    }
-    public addChild(child: Builder<T>, name = '0') {
-        if (this.numChildren > 0) {
-            throw new Error('ListBuilder can only have one child.');
-        }
-        this.children[this.numChildren] = child;
-        this.type = new List(new Field(name, child.type, true));
-        return this.numChildren - 1;
-    }
-    public clear() {
-        this._run.clear();
-        return super.clear();
-    }
-    protected _flushPending(pending: Map<number, T['TValue'] | undefined>) {
-        const run = this._run;
-        const offsets = this._offsets;
-        const setValue = this._setValue;
-        let index = 0, value: Uint8Array | undefined;
-        for ([index, value] of pending) {
-            if (value === undefined) {
-                offsets.set(index, 0);
-            } else {
-                offsets.set(index, value.length);
-                setValue(this, index, run.bind(value));
-            }
-        }
-    }
-}
diff --git a/js/src/builder/map.ts b/js/src/builder/map.ts
deleted file mode 100644
index 25affef..0000000
--- a/js/src/builder/map.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Field } from '../schema';
-import { DataType, Map_, Struct } from '../type';
-import { Builder, VariableWidthBuilder } from '../builder';
-
-/** @ignore */ type MapValue<K extends DataType = any, V extends DataType = any> = Map_<K, V>['TValue'];
-/** @ignore */ type MapValues<K extends DataType = any, V extends DataType = any> = Map<number, MapValue<K, V> | undefined>;
-/** @ignore */ type MapValueExt<K extends DataType = any, V extends DataType = any> = MapValue<K, V> | { [key: string]: V } | { [key: number]: V };
-
-/** @ignore */
-export class MapBuilder<K extends DataType = any, V extends DataType = any, TNull = any> extends VariableWidthBuilder<Map_<K, V>, TNull> {
-
-    protected _pending: MapValues<K, V> | undefined;
-    public set(index: number, value: MapValueExt<K, V> | TNull) {
-        return super.set(index, value as MapValue<K, V> | TNull);
-    }
-
-    public setValue(index: number, value: MapValueExt<K, V>) {
-        value = value instanceof Map ? value : new Map(Object.entries(value));
-        const pending = this._pending || (this._pending = new Map() as MapValues<K, V>);
-        const current = pending.get(index);
-        current && (this._pendingLength -= current.size);
-        this._pendingLength += value.size;
-        pending.set(index, value);
-    }
-
-    public addChild(child: Builder<Struct<{ key: K; value: V }>>, name = `${this.numChildren}`) {
-        if (this.numChildren > 0) {
-            throw new Error('MapBuilder can only have one child.');
-        }
-        this.children[this.numChildren] = child;
-        this.type = new Map_<K, V>(new Field(name, child.type, true), this.type.keysSorted);
-        return this.numChildren - 1;
-    }
-
-    protected _flushPending(pending: MapValues<K, V>) {
-        const offsets = this._offsets;
-        const setValue = this._setValue;
-        pending.forEach((value, index) => {
-            if (value === undefined) {
-                offsets.set(index, 0);
-            } else {
-                offsets.set(index, value.size);
-                setValue(this, index, value);
-            }
-        });
-    }
-}
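
MapBuilder.setValue coerces plain objects to Map instances via Object.entries,
so object literals and real Maps buffer identically. A sketch; the Map_ type
is assumed to take a single entries Field of a key/value Struct, matching what
addChild above constructs:

    import { Builder, Map_, Struct, Field, Utf8, Int32 } from 'apache-arrow';

    const entries = new Field('entries', new Struct([
        new Field('key', new Utf8(), false),
        new Field('value', new Int32(), true),
    ]), false);

    const builder = Builder.new({ type: new Map_(entries), nullValues: [null] });
    builder.set(0, { a: 1, b: 2 });            // coerced via Object.entries
    builder.set(1, new Map([['c', 3]]));
    console.log(builder.finish().toVector().length);   // 2
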
diff --git a/js/src/builder/null.ts b/js/src/builder/null.ts
deleted file mode 100644
index 4be3f06..0000000
--- a/js/src/builder/null.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Null } from '../type';
-import { Builder } from '../builder';
-
-/** @ignore */
-export class NullBuilder<TNull = any> extends Builder<Null, TNull> {
-    // @ts-ignore
-    public setValue(index: number, value: null) {}
-    public setValid(index: number, valid: boolean) {
-        this.length = Math.max(index + 1, this.length);
-        return valid;
-    }
-}
diff --git a/js/src/builder/run.ts b/js/src/builder/run.ts
deleted file mode 100644
index 5239f51..0000000
--- a/js/src/builder/run.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Vector } from '../vector';
-import { DataType } from '../type';
-
-/** @ignore */
-export class Run<T extends DataType = any, TNull = any> {
-    protected _values!: ArrayLike<T['TValue'] | TNull>;
-    public get length() { return this._values.length; }
-    public get(index: number) { return this._values[index]; }
-    public clear() { this._values = <any> null; return this; }
-    public bind(values: Vector<T> | ArrayLike<T['TValue'] | TNull>) {
-        if (values instanceof Vector) {
-            return values;
-        }
-        this._values = values;
-        return this as any;
-    }
-}
diff --git a/js/src/builder/struct.ts b/js/src/builder/struct.ts
deleted file mode 100644
index 4d12336..0000000
--- a/js/src/builder/struct.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Field } from '../schema';
-import { Builder } from '../builder';
-import { DataType, Struct } from '../type';
-
-/** @ignore */
-export class StructBuilder<T extends { [key: string]: DataType } = any, TNull = any> extends Builder<Struct<T>, TNull> {
-    public addChild(child: Builder, name = `${this.numChildren}`) {
-        const childIndex = this.children.push(child);
-        this.type = new Struct([...this.type.children, new Field(name, child.type, true)]);
-        return childIndex;
-    }
-}
diff --git a/js/src/builder/time.ts b/js/src/builder/time.ts
deleted file mode 100644
index bfa71d2..0000000
--- a/js/src/builder/time.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { FixedWidthBuilder } from '../builder';
-import { Time, TimeSecond, TimeMillisecond, TimeMicrosecond, TimeNanosecond } from '../type';
-
-/** @ignore */
-export class TimeBuilder<T extends Time = Time, TNull = any> extends FixedWidthBuilder<T, TNull> {}
-/** @ignore */
-export class TimeSecondBuilder<TNull = any> extends TimeBuilder<TimeSecond, TNull> {}
-/** @ignore */
-export class TimeMillisecondBuilder<TNull = any> extends TimeBuilder<TimeMillisecond, TNull> {}
-/** @ignore */
-export class TimeMicrosecondBuilder<TNull = any> extends TimeBuilder<TimeMicrosecond, TNull> {}
-/** @ignore */
-export class TimeNanosecondBuilder<TNull = any> extends TimeBuilder<TimeNanosecond, TNull> {}
diff --git a/js/src/builder/timestamp.ts b/js/src/builder/timestamp.ts
deleted file mode 100644
index 49741e9..0000000
--- a/js/src/builder/timestamp.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { FixedWidthBuilder } from '../builder';
-import { Timestamp, TimestampSecond, TimestampMillisecond, TimestampMicrosecond, TimestampNanosecond } from '../type';
-
-/** @ignore */
-export class TimestampBuilder<T extends Timestamp = Timestamp, TNull = any> extends FixedWidthBuilder<T, TNull> {}
-/** @ignore */
-export class TimestampSecondBuilder<TNull = any> extends TimestampBuilder<TimestampSecond, TNull> {}
-/** @ignore */
-export class TimestampMillisecondBuilder<TNull = any> extends TimestampBuilder<TimestampMillisecond, TNull> {}
-/** @ignore */
-export class TimestampMicrosecondBuilder<TNull = any> extends TimestampBuilder<TimestampMicrosecond, TNull> {}
-/** @ignore */
-export class TimestampNanosecondBuilder<TNull = any> extends TimestampBuilder<TimestampNanosecond, TNull> {}
diff --git a/js/src/builder/union.ts b/js/src/builder/union.ts
deleted file mode 100644
index 18ac05b..0000000
--- a/js/src/builder/union.ts
+++ /dev/null
@@ -1,96 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Field } from '../schema';
-import { DataBufferBuilder } from './buffer';
-import { Builder, BuilderOptions } from '../builder';
-import { Union, SparseUnion, DenseUnion } from '../type';
-
-export interface UnionBuilderOptions<T extends Union = any, TNull = any> extends BuilderOptions<T, TNull> {
-    valueToChildTypeId?: (builder: UnionBuilder<T, TNull>, value: any, offset: number) => number;
-}
-
-/** @ignore */
-export abstract class UnionBuilder<T extends Union, TNull = any> extends Builder<T, TNull> {
-
-    protected _typeIds: DataBufferBuilder<Int8Array>;
-
-    constructor(options: UnionBuilderOptions<T, TNull>) {
-        super(options);
-        this._typeIds = new DataBufferBuilder(new Int8Array(0), 1);
-        if (typeof options['valueToChildTypeId'] === 'function') {
-            this._valueToChildTypeId = options['valueToChildTypeId'];
-        }
-    }
-
-    public get typeIdToChildIndex() { return this.type.typeIdToChildIndex; }
-
-    public append(value: T['TValue'] | TNull, childTypeId?: number) {
-        return this.set(this.length, value, childTypeId);
-    }
-
-    public set(index: number, value: T['TValue'] | TNull, childTypeId?: number) {
-        if (childTypeId === undefined) {
-            childTypeId = this._valueToChildTypeId(this, value, index);
-        }
-        if (this.setValid(index, this.isValid(value))) {
-            this.setValue(index, value, childTypeId);
-        }
-        return this;
-    }
-
-    public setValue(index: number, value: T['TValue'], childTypeId?: number) {
-        this._typeIds.set(index, childTypeId!);
-        super.setValue(index, value);
-    }
-
-    public addChild(child: Builder, name = `${this.children.length}`) {
-        const childTypeId = this.children.push(child);
-        const { type: { children, mode, typeIds } } = this;
-        const fields = [...children, new Field(name, child.type)];
-        this.type = <T> new Union(mode, [...typeIds, childTypeId], fields);
-        return childTypeId;
-    }
-
-    /** @ignore */
-    // @ts-ignore
-    protected _valueToChildTypeId(builder: UnionBuilder<T, TNull>, value: any, offset: number): number {
-        throw new Error(`Cannot map UnionBuilder value to child typeId. \
-Pass the \`childTypeId\` as the second argument to unionBuilder.append(), \
-or supply a \`valueToChildTypeId\` function as part of the UnionBuilder constructor options.`);
-    }
-}
-
-/** @ignore */
-export class SparseUnionBuilder<T extends SparseUnion, TNull = any> extends UnionBuilder<T, TNull> {}
-/** @ignore */
-export class DenseUnionBuilder<T extends DenseUnion, TNull = any> extends UnionBuilder<T, TNull> {
-
-    protected _offsets: DataBufferBuilder<Int32Array>;
-
-    constructor(options: UnionBuilderOptions<T, TNull>) {
-        super(options);
-        this._offsets = new DataBufferBuilder(new Int32Array(0));
-    }
-
-    /** @ignore */
-    public setValue(index: number, value: T['TValue'], childTypeId?: number) {
-        const childIndex = this.type.typeIdToChildIndex[childTypeId!];
-        this._offsets.set(index, this.getChildAt(childIndex)!.length);
-        return super.setValue(index, value, childTypeId);
-    }
-}
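
When set() is called without an explicit childTypeId, UnionBuilder consults
the valueToChildTypeId option (or throws, per the stub above). A sketch of
routing values by runtime type; UnionMode and the Union constructor signature
(mode, typeIds, fields) are assumed from the pre-removal exports and from
addChild above:

    import { Builder, Union, Field, Utf8, Float64, UnionMode } from 'apache-arrow';

    const opts = {
        type: new Union(UnionMode.Dense, [0, 1], [
            new Field('str', new Utf8(), true),
            new Field('num', new Float64(), true),
        ]),
        valueToChildTypeId: (_b: unknown, value: unknown) =>
            typeof value === 'string' ? 0 : 1,
    };
    const builder = Builder.new(opts);
    builder.append('hello');      // routed to the Utf8 child
    builder.append(3.14);         // routed to the Float64 child
    builder.append('world', 0);   // or pass the childTypeId explicitly
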
diff --git a/js/src/builder/utf8.ts b/js/src/builder/utf8.ts
deleted file mode 100644
index 7564cda..0000000
--- a/js/src/builder/utf8.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Utf8 } from '../type';
-import { encodeUtf8 } from '../util/utf8';
-import { BinaryBuilder } from './binary';
-import { BufferBuilder } from './buffer';
-import { VariableWidthBuilder, BuilderOptions } from '../builder';
-
-/** @ignore */
-export class Utf8Builder<TNull = any> extends VariableWidthBuilder<Utf8, TNull> {
-    constructor(opts: BuilderOptions<Utf8, TNull>) {
-        super(opts);
-        this._values = new BufferBuilder(new Uint8Array(0));
-    }
-    public get byteLength(): number {
-        let size = this._pendingLength + (this.length * 4);
-        this._offsets && (size += this._offsets.byteLength);
-        this._values && (size += this._values.byteLength);
-        this._nulls && (size += this._nulls.byteLength);
-        return size;
-    }
-    public setValue(index: number, value: string) {
-        return super.setValue(index, encodeUtf8(value) as any);
-    }
-    // @ts-ignore
-    protected _flushPending(pending: Map<number, Uint8Array | undefined>, pendingLength: number): void {}
-}
-
-(Utf8Builder.prototype as any)._flushPending = (BinaryBuilder.prototype as any)._flushPending;
diff --git a/js/src/builder/valid.ts b/js/src/builder/valid.ts
deleted file mode 100644
index ae5b799..0000000
--- a/js/src/builder/valid.ts
+++ /dev/null
@@ -1,77 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { DataType } from '../type';
-import { valueToString } from '../util/pretty';
-import { BigIntAvailable } from '../util/compat';
-
-/**
- * Dynamically compile the null values into an `isValid()` function whose
- * implementation is a switch statement. Microbenchmarks in v8 indicate
- * this approach is 25% faster than using an ES6 Map.
- *
- * @example
- * console.log(createIsValidFunction([null, 'N/A', NaN]));
- * `function (x) {
- *     if (x !== x) return false;
- *     switch (x) {
- *         case null:
- *         case "N/A":
- *             return false;
- *     }
- *     return true;
- * }`
- *
- * @ignore
- * @param nullValues
- */
-export function createIsValidFunction<T extends DataType = any, TNull = any>(nullValues?: ReadonlyArray<TNull>) {
-
-    if (!nullValues || nullValues.length <= 0) {
-        // @ts-ignore
-        return function isValid(value: any) { return true; };
-    }
-
-    let fnBody = '';
-    const noNaNs = nullValues.filter((x) => x === x);
-
-    if (noNaNs.length > 0) {
-        fnBody = `
-    switch (x) {${noNaNs.map((x) => `
-        case ${valueToCase(x)}:`).join('')}
-            return false;
-    }`;
-    }
-
-    // NaN doesn't equal anything including itself, so it doesn't work as a
-    // switch case. Instead we must explicitly check for NaN before the switch.
-    if (nullValues.length !== noNaNs.length) {
-        fnBody = `if (x !== x) return false;\n${fnBody}`;
-    }
-
-    return new Function(`x`, `${fnBody}\nreturn true;`) as (value: T['TValue'] | TNull) => boolean;
-}
-
-/** @ignore */
-function valueToCase(x: any) {
-    if (typeof x !== 'bigint') {
-        return valueToString(x);
-    } else if (BigIntAvailable) {
-        return `${valueToString(x)}n`;
-    }
-    return `"${valueToString(x)}"`;
-}
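
Driving the factory above directly (a module-local import is assumed here,
since createIsValidFunction was internal to the builders rather than a
top-level package export):

    const isValid = createIsValidFunction([null, 'N/A', NaN]);

    console.log(isValid('hello'));   // true
    console.log(isValid('N/A'));     // false (switch case)
    console.log(isValid(NaN));       // false (the x !== x guard runs first)
    console.log(isValid(0));         // true  (0 is not in the null list)
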
diff --git a/js/src/column.ts b/js/src/column.ts
deleted file mode 100644
index 48b40e5..0000000
--- a/js/src/column.ts
+++ /dev/null
@@ -1,136 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Data } from './data';
-import { Field } from './schema';
-import { DataType } from './type';
-import { Vector } from './vector';
-import { Clonable, Sliceable, Applicative } from './vector';
-import { VectorCtorArgs, VectorType as V } from './interfaces';
-import { Chunked, SearchContinuation } from './vector/chunked';
-
-export interface Column<T extends DataType = any> {
-    concat(...others: Vector<T>[]): Column<T>;
-    slice(begin?: number, end?: number): Column<T>;
-    clone(chunks?: Vector<T>[], offsets?: Uint32Array): Column<T>;
-}
-
-export class Column<T extends DataType = any>
-    extends Chunked<T>
-    implements Clonable<Column<T>>,
-               Sliceable<Column<T>>,
-               Applicative<T, Column<T>> {
-
-    public static new<T extends DataType>(data: Data<T>, ...args: VectorCtorArgs<V<T>>): Column<T>;
-    public static new<T extends DataType>(field: string | Field<T>, ...chunks: (Vector<T> | Vector<T>[])[]): Column<T>;
-    public static new<T extends DataType>(field: string | Field<T>, data: Data<T>, ...args: VectorCtorArgs<V<T>>): Column<T>;
-    /** @nocollapse */
-    public static new<T extends DataType = any>(...args: any[]) {
-
-        let [field, data, ...rest] = args as [
-            string | Field<T>,
-            Data<T> | Vector<T> | (Data<T> | Vector<T>)[],
-            ...any[]
-        ];
-
-        if (typeof field !== 'string' && !(field instanceof Field)) {
-            data = <Data<T> | Vector<T> | (Data<T> | Vector<T>)[]> field;
-            field = '';
-        }
-
-        const chunks = Chunked.flatten<T>(
-            Array.isArray(data) ? [...data, ...rest] :
-            data instanceof Vector ? [data, ...rest] :
-            [Vector.new(data, ...rest)]
-        );
-
-        if (typeof field === 'string') {
-            const type = chunks[0].data.type;
-            field = new Field(field, type, true);
-        } else if (!field.nullable && chunks.some(({ nullCount }) => nullCount > 0)) {
-            field = field.clone({ nullable: true });
-        }
-        return new Column(field, chunks);
-    }
-
-    constructor(field: Field<T>, vectors: Vector<T>[] = [], offsets?: Uint32Array) {
-        vectors = Chunked.flatten<T>(...vectors);
-        super(field.type, vectors, offsets);
-        this._field = field;
-        if (vectors.length === 1 && !(this instanceof SingleChunkColumn)) {
-            return new SingleChunkColumn(field, vectors[0], this._chunkOffsets);
-        }
-    }
-
-    protected _field: Field<T>;
-    protected _children?: Column[];
-
-    public get field() { return this._field; }
-    public get name() { return this._field.name; }
-    public get nullable() { return this._field.nullable; }
-    public get metadata() { return this._field.metadata; }
-
-    public clone(chunks = this._chunks) {
-        return new Column(this._field, chunks);
-    }
-
-    public getChildAt<R extends DataType = any>(index: number): Column<R> | null {
-
-        if (index < 0 || index >= this.numChildren) { return null; }
-
-        const columns = this._children || (this._children = []);
-        let column: Column<R>, field: Field<R>, chunks: Vector<R>[];
-
-        if (column = columns[index]) { return column; }
-        if (field = ((this.type.children || [])[index] as Field<R>)) {
-            chunks = this._chunks
-                .map((vector) => vector.getChildAt<R>(index))
-                .filter((vec): vec is Vector<R> => vec != null);
-            if (chunks.length > 0) {
-                return (columns[index] = new Column<R>(field, chunks));
-            }
-        }
-
-        return null;
-    }
-}
-
-/** @ignore */
-class SingleChunkColumn<T extends DataType = any> extends Column<T> {
-    protected _chunk: Vector<T>;
-    constructor(field: Field<T>, vector: Vector<T>, offsets?: Uint32Array) {
-        super(field, [vector], offsets);
-        this._chunk = vector;
-    }
-    public search(index: number): [number, number] | null;
-    public search<N extends SearchContinuation<Chunked<T>>>(index: number, then?: N): ReturnType<N>;
-    public search<N extends SearchContinuation<Chunked<T>>>(index: number, then?: N) {
-        return then ? then(this, 0, index) : [0, index];
-    }
-    public isValid(index: number): boolean {
-        return this._chunk.isValid(index);
-    }
-    public get(index: number): T['TValue'] | null {
-        return this._chunk.get(index);
-    }
-    public set(index: number, value: T['TValue'] | null): void {
-        this._chunk.set(index, value);
-    }
-    public indexOf(element: T['TValue'], offset?: number): number {
-        return this._chunk.indexOf(element, offset);
-    }
-}
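
For context on what this diff removes: the Column class above funnels several call shapes through Column.new. A minimal usage sketch of the deleted API follows (TypeScript; the module specifiers and the Int32Vector/Int32 names are assumptions based on the js/src layout, not part of this diff):

    // Sketch only: import paths mirror the js/src layout assumed here.
    import { Column } from './column';
    import { Field } from './schema';
    import { Int32 } from './type';
    import { Int32Vector } from './vector/int';

    // String form: Column.new derives the Field from the first chunk's type.
    const ints = Int32Vector.from(new Int32Array([1, 2, 3]));
    const byName = Column.new('ints', ints);

    // Field form: per the clone above, a non-nullable Field is widened to
    // nullable when any chunk actually contains nulls.
    const field = new Field<Int32>('ints', new Int32(), false);
    const twoChunks = Column.new(field, ints, ints); // multiple chunks
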
diff --git a/js/src/compute/dataframe.ts b/js/src/compute/dataframe.ts
deleted file mode 100644
index ecebce0..0000000
--- a/js/src/compute/dataframe.ts
+++ /dev/null
@@ -1,283 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Table } from '../table';
-import { Vector } from '../vector';
-import { IntVector } from '../vector/int';
-import { Field, Schema } from '../schema';
-import { Predicate, Col, PredicateFunc } from './predicate';
-import { RecordBatch } from '../recordbatch';
-import { VectorType as V } from '../interfaces';
-import { DataType, Int, Struct, Dictionary } from '../type';
-
-/** @ignore */
-export type BindFunc = (batch: RecordBatch) => void;
-/** @ignore */
-export type NextFunc = (idx: number, batch: RecordBatch) => void;
-
-Table.prototype.countBy = function(this: Table, name: Col | string) { return new DataFrame(this.chunks).countBy(name); };
-Table.prototype.scan = function(this: Table, next: NextFunc, bind?: BindFunc) { return new DataFrame(this.chunks).scan(next, bind); };
-Table.prototype.scanReverse = function(this: Table, next: NextFunc, bind?: BindFunc) { return new DataFrame(this.chunks).scanReverse(next, bind); };
-Table.prototype.filter = function(this: Table, predicate: Predicate): FilteredDataFrame { return new DataFrame(this.chunks).filter(predicate); };
-
-export class DataFrame<T extends { [key: string]: DataType } = any> extends Table<T> {
-    public filter(predicate: Predicate): FilteredDataFrame<T> {
-        return new FilteredDataFrame<T>(this.chunks, predicate);
-    }
-    public scan(next: NextFunc, bind?: BindFunc) {
-        const batches = this.chunks, numBatches = batches.length;
-        for (let batchIndex = -1; ++batchIndex < numBatches;) {
-            // load batches
-            const batch = batches[batchIndex];
-            if (bind) { bind(batch); }
-            // yield all indices
-            for (let index = -1, numRows = batch.length; ++index < numRows;) {
-                next(index, batch);
-            }
-        }
-    }
-    public scanReverse(next: NextFunc, bind?: BindFunc) {
-        const batches = this.chunks, numBatches = batches.length;
-        for (let batchIndex = numBatches; --batchIndex >= 0;) {
-            // load batches
-            const batch = batches[batchIndex];
-            if (bind) { bind(batch); }
-            // yield all indices
-            for (let index = batch.length; --index >= 0;) {
-                next(index, batch);
-            }
-        }
-    }
-    public countBy(name: Col | string) {
-        const batches = this.chunks, numBatches = batches.length;
-        const count_by = typeof name === 'string' ? new Col(name) : name as Col;
-        // Assume that all dictionary batches are deltas, which means that the
-        // last record batch has the most complete dictionary
-        count_by.bind(batches[numBatches - 1]);
-        const vector = count_by.vector as V<Dictionary>;
-        if (!DataType.isDictionary(vector.type)) {
-            throw new Error('countBy currently only supports dictionary-encoded columns');
-        }
-
-        const countByteLength = Math.ceil(Math.log(vector.length) / Math.log(256));
-        const CountsArrayType = countByteLength == 4 ? Uint32Array :
-                                countByteLength >= 2 ? Uint16Array : Uint8Array;
-
-        const counts = new CountsArrayType(vector.dictionary.length);
-        for (let batchIndex = -1; ++batchIndex < numBatches;) {
-            // load batches
-            const batch = batches[batchIndex];
-            // rebind the countBy Col
-            count_by.bind(batch);
-            const keys = (count_by.vector as V<Dictionary>).indices;
-            // yield all indices
-            for (let index = -1, numRows = batch.length; ++index < numRows;) {
-                const key = keys.get(index);
-                if (key !== null) { counts[key]++; }
-            }
-        }
-        return new CountByResult(vector.dictionary, IntVector.from(counts));
-    }
-}
-
-/** @ignore */
-export class CountByResult<T extends DataType = any, TCount extends Int = Int> extends Table<{ values: T;  counts: TCount }> {
-    constructor(values: Vector<T>, counts: V<TCount>) {
-        type R = { values: T; counts: TCount };
-        const schema = new Schema<R>([
-            new Field('values', values.type),
-            new Field('counts', counts.type)
-        ]);
-        super(new RecordBatch<R>(schema, counts.length, [values, counts]));
-    }
-    public toJSON(): Record<string, unknown> {
-        const values = this.getColumnAt(0)!;
-        const counts = this.getColumnAt(1)!;
-        const result = {} as { [k: string]: number | null };
-        for (let i = -1; ++i < this.length;) {
-            result[values.get(i)] = counts.get(i);
-        }
-        return result;
-    }
-}
-
-/** @ignore */
-class FilteredBatchIterator<T extends { [key: string]: DataType }> implements IterableIterator<Struct<T>['TValue']> {
-    private batchIndex = 0;
-    private batch: RecordBatch<T>;
-    private index = 0;
-    private predicateFunc: PredicateFunc;
-
-    constructor(
-        private batches: RecordBatch<T>[],
-        private predicate: Predicate
-    ) {
-        // TODO: bind batches lazily
-        // If predicate doesn't match anything in the batch we don't need
-        // to bind the callback
-        this.batch = this.batches[this.batchIndex];
-        this.predicateFunc = this.predicate.bind(this.batch);
-    }
-
-    next(): IteratorResult<Struct<T>['TValue']> {
-        while (this.batchIndex < this.batches.length) {
-            while (this.index < this.batch.length) {
-                if (this.predicateFunc(this.index, this.batch)) {
-                    return {
-                        value: this.batch.get(this.index++) as any,
-                    };
-                }
-                this.index++;
-            }
-
-            if (++this.batchIndex < this.batches.length) {
-                this.index = 0;
-                this.batch = this.batches[this.batchIndex];
-                this.predicateFunc = this.predicate.bind(this.batch);
-            }
-        }
-
-        return {done: true, value: null};
-    }
-
-    [Symbol.iterator]() {
-        return this;
-    }
-}
-
-/** @ignore */
-export class FilteredDataFrame<T extends { [key: string]: DataType } = any> extends DataFrame<T> {
-    private _predicate: Predicate;
-    constructor (batches: RecordBatch<T>[], predicate: Predicate) {
-        super(batches);
-        this._predicate = predicate;
-    }
-    public scan(next: NextFunc, bind?: BindFunc) {
-        // inlined version of this:
-        // this.parent.scan((idx, columns) => {
-        //     if (this.predicate(idx, columns)) next(idx, columns);
-        // });
-        const batches = this._chunks;
-        const numBatches = batches.length;
-        for (let batchIndex = -1; ++batchIndex < numBatches;) {
-            // load batches
-            const batch = batches[batchIndex];
-            const predicate = this._predicate.bind(batch);
-            let isBound = false;
-            // yield all indices
-            for (let index = -1, numRows = batch.length; ++index < numRows;) {
-                if (predicate(index, batch)) {
-                    // bind batches lazily - if predicate doesn't match anything
-                    // in the batch we don't need to call bind on the batch
-                    if (bind && !isBound) {
-                        bind(batch);
-                        isBound = true;
-                    }
-                    next(index, batch);
-                }
-            }
-        }
-    }
-    public scanReverse(next: NextFunc, bind?: BindFunc) {
-        const batches = this._chunks;
-        const numBatches = batches.length;
-        for (let batchIndex = numBatches; --batchIndex >= 0;) {
-            // load batches
-            const batch = batches[batchIndex];
-            const predicate = this._predicate.bind(batch);
-            let isBound = false;
-            // yield all indices
-            for (let index = batch.length; --index >= 0;) {
-                if (predicate(index, batch)) {
-                    // bind batches lazily - if predicate doesn't match anything
-                    // in the batch we don't need to call bind on the batch
-                    if (bind && !isBound) {
-                        bind(batch);
-                        isBound = true;
-                    }
-                    next(index, batch);
-                }
-            }
-        }
-    }
-    public count(): number {
-        // inlined version of this:
-        // let sum = 0;
-        // this.parent.scan((idx, columns) => {
-        //     if (this.predicate(idx, columns)) ++sum;
-        // });
-        // return sum;
-        let sum = 0;
-        const batches = this._chunks;
-        const numBatches = batches.length;
-        for (let batchIndex = -1; ++batchIndex < numBatches;) {
-            // load batches
-            const batch = batches[batchIndex];
-            const predicate = this._predicate.bind(batch);
-            for (let index = -1, numRows = batch.length; ++index < numRows;) {
-                if (predicate(index, batch)) { ++sum; }
-            }
-        }
-        return sum;
-    }
-
-    public [Symbol.iterator](): IterableIterator<Struct<T>['TValue']> {
-        // inlined version of this:
-        // this.parent.scan((idx, columns) => {
-        //     if (this.predicate(idx, columns)) next(idx, columns);
-        // });
-        return new FilteredBatchIterator<T>(this._chunks, this._predicate);
-    }
-    public filter(predicate: Predicate): FilteredDataFrame<T> {
-        return new FilteredDataFrame<T>(
-            this._chunks,
-            this._predicate.and(predicate)
-        );
-    }
-    public countBy(name: Col | string) {
-        const batches = this._chunks, numBatches = batches.length;
-        const count_by = typeof name === 'string' ? new Col(name) : name as Col;
-        // Assume that all dictionary batches are deltas, which means that the
-        // last record batch has the most complete dictionary
-        count_by.bind(batches[numBatches - 1]);
-        const vector = count_by.vector as V<Dictionary>;
-        if (!DataType.isDictionary(vector.type)) {
-            throw new Error('countBy currently only supports dictionary-encoded columns');
-        }
-
-        const countByteLength = Math.ceil(Math.log(vector.length) / Math.log(256));
-        const CountsArrayType = countByteLength == 4 ? Uint32Array :
-                                countByteLength >= 2 ? Uint16Array : Uint8Array;
-
-        const counts = new CountsArrayType(vector.dictionary.length);
-
-        for (let batchIndex = -1; ++batchIndex < numBatches;) {
-            // load batches
-            const batch = batches[batchIndex];
-            const predicate = this._predicate.bind(batch);
-            // rebind the countBy Col
-            count_by.bind(batch);
-            const keys = (count_by.vector as V<Dictionary>).indices;
-            // yield all indices
-            for (let index = -1, numRows = batch.length; ++index < numRows;) {
-                const key = keys.get(index);
-                if (key !== null && predicate(index, batch)) { counts[key]++; }
-            }
-        }
-        return new CountByResult(vector.dictionary, IntVector.from(counts));
-    }
-}
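
Note that the module above patches Table.prototype, so importing it for its side effects is what enabled filter/countBy/scan directly on a Table. A short sketch of the removed surface in use (the table value and its column names are illustrative assumptions):

    import { Table } from './table';
    import { col } from './compute/predicate';
    import './compute/dataframe'; // side effect: installs filter/countBy/scan on Table

    declare const table: Table; // assume a Table loaded elsewhere

    // filter() is lazy: the predicate is re-bound per RecordBatch during scan/count.
    const adults = table.filter(col('age').ge(18));
    console.log(adults.count());

    // countBy() only supports dictionary-encoded columns, per the check above.
    console.log(adults.countBy('city').toJSON());
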
diff --git a/js/src/compute/predicate.ts b/js/src/compute/predicate.ts
deleted file mode 100644
index 5203076..0000000
--- a/js/src/compute/predicate.ts
+++ /dev/null
@@ -1,292 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Vector } from '../vector';
-import { RecordBatch } from '../recordbatch';
-import { DictionaryVector } from '../vector/dictionary';
-
-/** @ignore */
-export type ValueFunc<T> = (idx: number, cols: RecordBatch) => T | null;
-/** @ignore */
-export type PredicateFunc = (idx: number, cols: RecordBatch) => boolean;
-
-/** @ignore */
-export abstract class Value<T> {
-    eq(other: Value<T> | T): Predicate {
-        if (!(other instanceof Value)) { other = new Literal(other); }
-        return new Equals(this, other);
-    }
-    le(other: Value<T> | T): Predicate {
-        if (!(other instanceof Value)) { other = new Literal(other); }
-        return new LTeq(this, other);
-    }
-    ge(other: Value<T> | T): Predicate {
-        if (!(other instanceof Value)) { other = new Literal(other); }
-        return new GTeq(this, other);
-    }
-    lt(other: Value<T> | T): Predicate {
-        return new Not(this.ge(other));
-    }
-    gt(other: Value<T> | T): Predicate {
-        return new Not(this.le(other));
-    }
-    ne(other: Value<T> | T): Predicate {
-        return new Not(this.eq(other));
-    }
-}
-
-/** @ignore */
-export class Literal<T= any> extends Value<T> {
-    constructor(public v: T) { super(); }
-}
-
-/** @ignore */
-export class Col<T= any> extends Value<T> {
-    public vector!: Vector;
-    public colidx!: number;
-
-    constructor(public name: string) { super(); }
-    bind(batch: RecordBatch): (idx: number, batch?: RecordBatch) => any {
-        if (!this.colidx) {
-            // Assume column index doesn't change between calls to bind
-            //this.colidx = cols.findIndex(v => v.name.indexOf(this.name) != -1);
-            this.colidx = -1;
-            const fields = batch.schema.fields;
-            for (let idx = -1; ++idx < fields.length;) {
-                if (fields[idx].name === this.name) {
-                    this.colidx = idx;
-                    break;
-                }
-            }
-            if (this.colidx < 0) { throw new Error(`Failed to bind Col "${this.name}"`); }
-        }
-
-        const vec = this.vector = batch.getChildAt(this.colidx)!;
-        return (idx: number) => vec.get(idx);
-    }
-}
-
-/** @ignore */
-export abstract class Predicate {
-    abstract bind(batch: RecordBatch): PredicateFunc;
-    and(...expr: Predicate[]): And { return new And(this, ...expr); }
-    or(...expr: Predicate[]): Or { return new Or(this, ...expr); }
-    not(): Predicate { return new Not(this); }
-}
-
-/** @ignore */
-export abstract class ComparisonPredicate<T= any> extends Predicate {
-    constructor(public readonly left: Value<T>, public readonly right: Value<T>) {
-        super();
-    }
-
-    bind(batch: RecordBatch) {
-        if (this.left instanceof Literal) {
-            if (this.right instanceof Literal) {
-                return this._bindLitLit(batch, this.left, this.right);
-            } else { // right is a Col
-
-                return this._bindLitCol(batch, this.left, this.right as Col);
-            }
-        } else { // left is a Col
-            if (this.right instanceof Literal) {
-                return this._bindColLit(batch, this.left as Col, this.right);
-            } else { // right is a Col
-                return this._bindColCol(batch, this.left as Col, this.right as Col);
-            }
-        }
-    }
-
-    protected abstract _bindLitLit(batch: RecordBatch, left: Literal, right: Literal): PredicateFunc;
-    protected abstract _bindColCol(batch: RecordBatch, left: Col, right: Col): PredicateFunc;
-    protected abstract _bindColLit(batch: RecordBatch, col: Col, lit: Literal): PredicateFunc;
-    protected abstract _bindLitCol(batch: RecordBatch, lit: Literal, col: Col): PredicateFunc;
-}
-
-/** @ignore */
-export abstract class CombinationPredicate extends Predicate {
-    readonly children: Predicate[];
-    constructor(...children: Predicate[]) {
-        super();
-        this.children = children;
-    }
-}
-// add children to prototype so it doesn't get mangled in es2015/umd
-(<any> CombinationPredicate.prototype).children = Object.freeze([]); // freeze for safety
-
-/** @ignore */
-export class And extends CombinationPredicate {
-    constructor(...children: Predicate[]) {
-        // Flatten any Ands
-        children = children.reduce((accum: Predicate[], p: Predicate): Predicate[] => {
-            return accum.concat(p instanceof And ? p.children : p);
-        }, []);
-        super(...children);
-    }
-    bind(batch: RecordBatch) {
-        const bound = this.children.map((p) => p.bind(batch));
-        return (idx: number, batch: RecordBatch) => bound.every((p) => p(idx, batch));
-    }
-}
-
-/** @ignore */
-export class Or extends CombinationPredicate {
-    constructor(...children: Predicate[]) {
-        // Flatten any Ors
-        children = children.reduce((accum: Predicate[], p: Predicate): Predicate[] => {
-            return accum.concat(p instanceof Or ? p.children : p);
-        }, []);
-        super(...children);
-    }
-    bind(batch: RecordBatch) {
-        const bound = this.children.map((p) => p.bind(batch));
-        return (idx: number, batch: RecordBatch) => bound.some((p) => p(idx, batch));
-    }
-}
-
-/** @ignore */
-export class Equals extends ComparisonPredicate {
-    // Helpers used to cache dictionary reverse lookups between calls to bind
-    private lastDictionary: Vector|undefined;
-    private lastKey: number|undefined;
-
-    protected _bindLitLit(_batch: RecordBatch, left: Literal, right: Literal): PredicateFunc {
-        const rtrn: boolean = left.v == right.v;
-        return () => rtrn;
-    }
-
-    protected _bindColCol(batch: RecordBatch, left: Col, right: Col): PredicateFunc {
-        const left_func = left.bind(batch);
-        const right_func = right.bind(batch);
-        return (idx: number, batch: RecordBatch) => left_func(idx, batch) == right_func(idx, batch);
-    }
-
-    protected _bindColLit(batch: RecordBatch, col: Col, lit: Literal): PredicateFunc {
-        const col_func = col.bind(batch);
-        if (col.vector instanceof DictionaryVector) {
-            let key: any;
-            const vector = col.vector as DictionaryVector;
-            if (vector.dictionary !== this.lastDictionary) {
-                key = vector.reverseLookup(lit.v);
-                this.lastDictionary = vector.dictionary;
-                this.lastKey = key;
-            } else {
-                key = this.lastKey;
-            }
-
-            if (key === -1) {
-                // the value doesn't exist in the dictionary - always return
-                // false
-                // TODO: special-case of PredicateFunc that encapsulates this
-                // "always false" behavior. That way filtering operations don't
-                // have to bother checking
-                return () => false;
-            } else {
-                return (idx: number) => {
-                    return vector.getKey(idx) === key;
-                };
-            }
-        } else {
-            return (idx: number, cols: RecordBatch) => col_func(idx, cols) == lit.v;
-        }
-    }
-
-    protected _bindLitCol(batch: RecordBatch, lit: Literal, col: Col) {
-        // Equals is commutative
-        return this._bindColLit(batch, col, lit);
-    }
-}
-
-/** @ignore */
-export class LTeq extends ComparisonPredicate {
-    protected _bindLitLit(_batch: RecordBatch, left: Literal, right: Literal): PredicateFunc {
-        const rtrn: boolean = left.v <= right.v;
-        return () => rtrn;
-    }
-
-    protected _bindColCol(batch: RecordBatch, left: Col, right: Col): PredicateFunc {
-        const left_func = left.bind(batch);
-        const right_func = right.bind(batch);
-        return (idx: number, cols: RecordBatch) => left_func(idx, cols) <= right_func(idx, cols);
-    }
-
-    protected _bindColLit(batch: RecordBatch, col: Col, lit: Literal): PredicateFunc {
-        const col_func = col.bind(batch);
-        return (idx: number, cols: RecordBatch) => col_func(idx, cols) <= lit.v;
-    }
-
-    protected _bindLitCol(batch: RecordBatch, lit: Literal, col: Col) {
-        const col_func = col.bind(batch);
-        return (idx: number, cols: RecordBatch) => lit.v <= col_func(idx, cols);
-    }
-}
-
-/** @ignore */
-export class GTeq extends ComparisonPredicate {
-    protected _bindLitLit(_batch: RecordBatch, left: Literal, right: Literal): PredicateFunc {
-        const rtrn: boolean = left.v >= right.v;
-        return () => rtrn;
-    }
-
-    protected _bindColCol(batch: RecordBatch, left: Col, right: Col): PredicateFunc {
-        const left_func = left.bind(batch);
-        const right_func = right.bind(batch);
-        return (idx: number, cols: RecordBatch) => left_func(idx, cols) >= right_func(idx, cols);
-    }
-
-    protected _bindColLit(batch: RecordBatch, col: Col, lit: Literal): PredicateFunc {
-        const col_func = col.bind(batch);
-        return (idx: number, cols: RecordBatch) => col_func(idx, cols) >= lit.v;
-    }
-
-    protected _bindLitCol(batch: RecordBatch, lit: Literal, col: Col) {
-        const col_func = col.bind(batch);
-        return (idx: number, cols: RecordBatch) => lit.v >= col_func(idx, cols);
-    }
-}
-
-/** @ignore */
-export class Not extends Predicate {
-    constructor(public readonly child: Predicate) {
-        super();
-    }
-
-    bind(batch: RecordBatch) {
-        const func = this.child.bind(batch);
-        return (idx: number, batch: RecordBatch) => !func(idx, batch);
-    }
-}
-
-/** @ignore */
-export class CustomPredicate extends Predicate {
-    constructor(private next: PredicateFunc, private bind_: (batch: RecordBatch) => void) {
-        super();
-    }
-
-    bind(batch: RecordBatch) {
-        this.bind_(batch);
-        return this.next;
-    }
-}
-
-export function lit(v: any): Value<any> { return new Literal(v); }
-export function col(n: string): Col<any> { return new Col(n); }
-export function and(...p: Predicate[]): And { return new And(...p); }
-export function or(...p: Predicate[]): Or { return new Or(...p); }
-export function custom(next: PredicateFunc, bind: (batch: RecordBatch) => void) {
-    return new CustomPredicate(next, bind);
-}
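
A sketch of composing the predicate DSL above (column names and values are illustrative):

    import { col, lit, and, or, custom } from './compute/predicate';

    // Comparison methods build an expression tree; bind() compiles it
    // against each RecordBatch's schema at scan time. Bare values are
    // wrapped in Literal automatically, so lit() is optional here.
    const p = and(
        col('age').ge(lit(21)),
        or(col('state').eq('CA'), col('state').eq('OR'))
    );

    // CustomPredicate admits arbitrary row-level logic; the second argument
    // runs once per batch (a no-op in this sketch).
    const evenRows = custom((idx) => idx % 2 === 0, () => {});
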
diff --git a/js/src/data.ts b/js/src/data.ts
deleted file mode 100644
index 2a54908..0000000
--- a/js/src/data.ts
+++ /dev/null
@@ -1,295 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { Vector } from './vector';
-import { truncateBitmap } from './util/bit';
-import { popcnt_bit_range } from './util/bit';
-import { BufferType, UnionMode, Type } from './enum';
-import { DataType, SparseUnion, DenseUnion, strideForType } from './type';
-import { toArrayBufferView, toUint8Array, toInt32Array } from './util/buffer';
-import {
-    Dictionary,
-    Null, Int, Float,
-    Binary, Bool, Utf8, Decimal,
-    Date_, Time, Timestamp, Interval,
-    List, Struct, Union, FixedSizeBinary, FixedSizeList, Map_,
-} from './type';
-
-// When slicing, we do not know the null count of the sliced range without
-// doing some computation. To avoid doing this eagerly, we set the null count
-// to -1 (any negative number will do). When Vector.nullCount is called the
-// first time, the null count will be computed. See ARROW-33
-/** @ignore */ export type kUnknownNullCount = -1;
-/** @ignore */ export const kUnknownNullCount = -1;
-
-/** @ignore */ export type NullBuffer = Uint8Array | null | undefined;
-/** @ignore */ export type TypeIdsBuffer = Int8Array  | ArrayLike<number> | Iterable<number> | undefined;
-/** @ignore */ export type ValueOffsetsBuffer = Int32Array  | ArrayLike<number> | Iterable<number> | undefined;
-/** @ignore */ export type DataBuffer<T extends DataType> = T['TArray'] | ArrayLike<number> | Iterable<number> | undefined;
-
-/** @ignore */
-export interface Buffers<T extends DataType> {
-      [BufferType.OFFSET]: Int32Array;
-        [BufferType.DATA]: T['TArray'];
-    [BufferType.VALIDITY]: Uint8Array;
-        [BufferType.TYPE]: T['TArray'];
-}
-
-/** @ignore */
-export interface Data<T extends DataType = DataType> {
-    readonly TType: T['TType'];
-    readonly TArray: T['TArray'];
-    readonly TValue: T['TValue'];
-}
-
-/** @ignore */
-export class Data<T extends DataType = DataType> {
-
-    public readonly type: T;
-    public readonly length: number;
-    public readonly offset: number;
-    public readonly stride: number;
-    public readonly childData: Data[];
-
-    /**
-     * The dictionary for this Vector, if any. Only used for Dictionary type.
-     */
-    public dictionary?: Vector;
-
-    public readonly values!: Buffers<T>[BufferType.DATA];
-    public readonly typeIds!: Buffers<T>[BufferType.TYPE];
-    public readonly nullBitmap!: Buffers<T>[BufferType.VALIDITY];
-    public readonly valueOffsets!: Buffers<T>[BufferType.OFFSET];
-
-    public get typeId(): T['TType'] { return this.type.typeId; }
-    public get ArrayType(): T['ArrayType'] { return this.type.ArrayType; }
-    public get buffers() {
-        return [this.valueOffsets, this.values, this.nullBitmap, this.typeIds] as Buffers<T>;
-    }
-    public get byteLength(): number {
-        let byteLength = 0;
-        const { valueOffsets, values, nullBitmap, typeIds } = this;
-        valueOffsets && (byteLength += valueOffsets.byteLength);
-        values       && (byteLength += values.byteLength);
-        nullBitmap   && (byteLength += nullBitmap.byteLength);
-        typeIds      && (byteLength += typeIds.byteLength);
-        return this.childData.reduce((byteLength, child) => byteLength + child.byteLength, byteLength);
-    }
-
-    protected _nullCount: number | kUnknownNullCount;
-
-    public get nullCount() {
-        let nullCount = this._nullCount;
-        let nullBitmap: Uint8Array | undefined;
-        if (nullCount <= kUnknownNullCount && (nullBitmap = this.nullBitmap)) {
-            this._nullCount = nullCount = this.length - popcnt_bit_range(nullBitmap, this.offset, this.offset + this.length);
-        }
-        return nullCount;
-    }
-
-    constructor(type: T, offset: number, length: number, nullCount?: number, buffers?: Partial<Buffers<T>> | Data<T>, childData?: (Data | Vector)[], dictionary?: Vector) {
-        this.type = type;
-        this.dictionary = dictionary;
-        this.offset = Math.floor(Math.max(offset || 0, 0));
-        this.length = Math.floor(Math.max(length || 0, 0));
-        this._nullCount = Math.floor(Math.max(nullCount || 0, -1));
-        this.childData = (childData || []).map((x) => x instanceof Data ? x : x.data) as Data[];
-        let buffer: Buffers<T>[keyof Buffers<T>];
-        if (buffers instanceof Data) {
-            this.stride = buffers.stride;
-            this.values = buffers.values;
-            this.typeIds = buffers.typeIds;
-            this.nullBitmap = buffers.nullBitmap;
-            this.valueOffsets = buffers.valueOffsets;
-        } else {
-            this.stride = strideForType(type);
-            if (buffers) {
-                (buffer = (buffers as Buffers<T>)[0]) && (this.valueOffsets = buffer);
-                (buffer = (buffers as Buffers<T>)[1]) && (this.values = buffer);
-                (buffer = (buffers as Buffers<T>)[2]) && (this.nullBitmap = buffer);
-                (buffer = (buffers as Buffers<T>)[3]) && (this.typeIds = buffer);
-            }
-        }
-    }
-
-    public clone<R extends DataType>(type: R, offset = this.offset, length = this.length, nullCount = this._nullCount, buffers: Buffers<R> = <any> this, childData: (Data | Vector)[] = this.childData) {
-        return new Data(type, offset, length, nullCount, buffers, childData, this.dictionary);
-    }
-
-    public slice(offset: number, length: number): Data<T> {
-        const { stride, typeId, childData } = this;
-        // +true === 1, +false === 0, so this means
-        // we keep nullCount at 0 if it's already 0,
-        // otherwise set to the invalidated flag -1
-        const nullCount = +(this._nullCount === 0) - 1;
-        const childStride = typeId === 16 /* FixedSizeList */ ? stride : 1;
-        const buffers = this._sliceBuffers(offset, length, stride, typeId);
-        return this.clone<T>(this.type, this.offset + offset, length, nullCount, buffers,
-            // Don't slice children if we have value offsets (the variable-width types)
-            (!childData.length || this.valueOffsets) ? childData : this._sliceChildren(childData, childStride * offset, childStride * length));
-    }
-
-    public _changeLengthAndBackfillNullBitmap(newLength: number): Data<T> {
-        if (this.typeId === Type.Null) {
-            return this.clone(this.type, 0, newLength, 0);
-        }
-        const { length, nullCount } = this;
-        // start initialized with 0s (nulls), then fill from 0 to length with 1s (not null)
-        const bitmap = new Uint8Array(((newLength + 63) & ~63) >> 3).fill(255, 0, length >> 3);
-        // set all the bits in the last byte (up to bit `length - length % 8`) to 1 (not null)
-        bitmap[length >> 3] = (1 << (length - (length & ~7))) - 1;
-        // if we have a nullBitmap, truncate + slice and set it over the pre-filled 1s
-        if (nullCount > 0) {
-            bitmap.set(truncateBitmap(this.offset, length, this.nullBitmap), 0);
-        }
-        const buffers = this.buffers;
-        buffers[BufferType.VALIDITY] = bitmap;
-        return this.clone(this.type, 0, newLength, nullCount + (newLength - length), buffers);
-    }
-
-    protected _sliceBuffers(offset: number, length: number, stride: number, typeId: T['TType']): Buffers<T> {
-        let arr: any;
-        const { buffers } = this;
-        // If typeIds exist, slice the typeIds buffer
-        (arr = buffers[BufferType.TYPE]) && (buffers[BufferType.TYPE] = arr.subarray(offset, offset + length));
-        // If offsets exist, only slice the offsets buffer
-        (arr = buffers[BufferType.OFFSET]) && (buffers[BufferType.OFFSET] = arr.subarray(offset, offset + length + 1)) ||
-        // Otherwise if no offsets, slice the data buffer. Don't slice the data vector for Booleans, since the offset goes by bits not bytes
-        (arr = buffers[BufferType.DATA]) && (buffers[BufferType.DATA] = typeId === 6 ? arr : arr.subarray(stride * offset, stride * (offset + length)));
-        return buffers;
-    }
-
-    protected _sliceChildren(childData: Data[], offset: number, length: number): Data[] {
-        return childData.map((child) => child.slice(offset, length));
-    }
-
-    //
-    // Convenience methods for creating Data instances for each of the Arrow Vector types
-    //
-    /** @nocollapse */
-    public static new<T extends DataType>(type: T, offset: number, length: number, nullCount?: number, buffers?: Partial<Buffers<T>> | Data<T>, childData?: (Data | Vector)[], dictionary?: Vector): Data<T> {
-        if (buffers instanceof Data) { buffers = buffers.buffers; } else if (!buffers) { buffers = [] as Partial<Buffers<T>>; }
-        switch (type.typeId) {
-            case Type.Null:            return <unknown> Data.Null(            <unknown> type as Null,            offset, length) as Data<T>;
-            case Type.Int:             return <unknown> Data.Int(             <unknown> type as Int,             offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Dictionary:      return <unknown> Data.Dictionary(      <unknown> type as Dictionary,      offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || [], dictionary!) as Data<T>;
-            case Type.Float:           return <unknown> Data.Float(           <unknown> type as Float,           offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Bool:            return <unknown> Data.Bool(            <unknown> type as Bool,            offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Decimal:         return <unknown> Data.Decimal(         <unknown> type as Decimal,         offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Date:            return <unknown> Data.Date(            <unknown> type as Date_,           offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Time:            return <unknown> Data.Time(            <unknown> type as Time,            offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Timestamp:       return <unknown> Data.Timestamp(       <unknown> type as Timestamp,       offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Interval:        return <unknown> Data.Interval(        <unknown> type as Interval,        offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.FixedSizeBinary: return <unknown> Data.FixedSizeBinary( <unknown> type as FixedSizeBinary, offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Binary:          return <unknown> Data.Binary(          <unknown> type as Binary,          offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.OFFSET] || [], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.Utf8:            return <unknown> Data.Utf8(            <unknown> type as Utf8,            offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.OFFSET] || [], buffers[BufferType.DATA] || []) as Data<T>;
-            case Type.List:            return <unknown> Data.List(            <unknown> type as List,            offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.OFFSET] || [], (childData || [])[0]) as Data<T>;
-            case Type.FixedSizeList:   return <unknown> Data.FixedSizeList(   <unknown> type as FixedSizeList,   offset, length, nullCount || 0, buffers[BufferType.VALIDITY], (childData || [])[0]) as Data<T>;
-            case Type.Struct:          return <unknown> Data.Struct(          <unknown> type as Struct,          offset, length, nullCount || 0, buffers[BufferType.VALIDITY], childData || []) as Data<T>;
-            case Type.Map:             return <unknown> Data.Map(             <unknown> type as Map_,            offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.OFFSET] || [], (childData || [])[0]) as Data<T>;
-            case Type.Union:           return <unknown> Data.Union(           <unknown> type as Union,           offset, length, nullCount || 0, buffers[BufferType.VALIDITY], buffers[BufferType.TYPE] || [], buffers[BufferType.OFFSET] || childData, childData) as Data<T>;
-        }
-        throw new Error(`Unrecognized typeId ${type.typeId}`);
-    }
-
-    /** @nocollapse */
-    public static Null<T extends Null>(type: T, offset: number, length: number) {
-        return new Data(type, offset, length, 0);
-    }
-    /** @nocollapse */
-    public static Int<T extends Int>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Dictionary<T extends Dictionary>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>, dictionary: Vector<T['dictionary']>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView<T['TArray']>(type.indices.ArrayType, data), toUint8Array(nullBitmap)], [], dictionary);
-    }
-    /** @nocollapse */
-    public static Float<T extends Float>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Bool<T extends Bool>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Decimal<T extends Decimal>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Date<T extends Date_>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Time<T extends Time>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Timestamp<T extends Timestamp>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Interval<T extends Interval>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static FixedSizeBinary<T extends FixedSizeBinary>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [undefined, toArrayBufferView(type.ArrayType, data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Binary<T extends Binary>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), toUint8Array(data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static Utf8<T extends Utf8>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, data: DataBuffer<T>) {
-        return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), toUint8Array(data), toUint8Array(nullBitmap)]);
-    }
-    /** @nocollapse */
-    public static List<T extends List>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, child: Data<T['valueType']> | Vector<T['valueType']>) {
-        return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), undefined, toUint8Array(nullBitmap)], child ? [child] : []);
-    }
-    /** @nocollapse */
-    public static FixedSizeList<T extends FixedSizeList>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, child: Data<T['valueType']> | Vector<T['valueType']>) {
-        return new Data(type, offset, length, nullCount, [undefined, undefined, toUint8Array(nullBitmap)], child ? [child] : []);
-    }
-    /** @nocollapse */
-    public static Struct<T extends Struct>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, children: (Data | Vector)[]) {
-        return new Data(type, offset, length, nullCount, [undefined, undefined, toUint8Array(nullBitmap)], children);
-    }
-    /** @nocollapse */
-    public static Map<T extends Map_>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, valueOffsets: ValueOffsetsBuffer, child: (Data | Vector)) {
-        return new Data(type, offset, length, nullCount, [toInt32Array(valueOffsets), undefined, toUint8Array(nullBitmap)], child ? [child] : []);
-    }
-    public static Union<T extends SparseUnion>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, children: (Data | Vector)[], _?: any): Data<T>;
-    public static Union<T extends DenseUnion>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, valueOffsets: ValueOffsetsBuffer, children: (Data | Vector)[]): Data<T>;
-    public static Union<T extends Union>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, valueOffsetsOrChildren: ValueOffsetsBuffer | (Data | Vector)[], children?: (Data | Vector)[]): Data<T>;
-    /** @nocollapse */
-    public static Union<T extends Union>(type: T, offset: number, length: number, nullCount: number, nullBitmap: NullBuffer, typeIds: TypeIdsBuffer, valueOffsetsOrChildren: ValueOffsetsBuffer | (Data | Vector)[], children?: (Data | Vector)[]) {
-        const buffers = <unknown> [
-            undefined, undefined,
-            toUint8Array(nullBitmap),
-            toArrayBufferView(type.ArrayType, typeIds)
-        ] as Partial<Buffers<T>>;
-        if (type.mode === UnionMode.Sparse) {
-            return new Data(type, offset, length, nullCount, buffers, valueOffsetsOrChildren as (Data | Vector)[]);
-        }
-        buffers[BufferType.OFFSET] = toInt32Array(<ValueOffsetsBuffer> valueOffsetsOrChildren);
-        return new Data(type, offset, length, nullCount, buffers, children);
-    }
-}
-
-(Data.prototype as any).childData = Object.freeze([]);
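
A sketch of the Data factories above for one fixed-width and one variable-width type (the Int32/Utf8 concrete type classes are assumed to come from js/src/type, which this change also removes):

    import { Data } from './data';
    import { Int32, Utf8 } from './type';

    // Fixed-width: values occupy BufferType.DATA; no value offsets needed.
    const ints = Data.Int(new Int32(), 0, 3, 0, null, new Int32Array([1, 2, 3]));

    // Variable-width: valueOffsets delimit each value in the byte buffer,
    // so offsets [0, 2, 5] encode "hi" and "yo!".
    const utf8 = Data.Utf8(new Utf8(), 0, 2, 0, null,
        new Int32Array([0, 2, 5]),
        new TextEncoder().encode('hiyo!'));
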
diff --git a/js/src/enum.ts b/js/src/enum.ts
deleted file mode 100644
index 517aa27..0000000
--- a/js/src/enum.ts
+++ /dev/null
@@ -1,142 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-export {
-    DateUnit,
-    TimeUnit,
-    Precision,
-    UnionMode,
-    IntervalUnit,
-    MetadataVersion,
-} from './fb/Schema';
-
-export { MessageHeader } from './fb/Message';
-
-/**
- * Main data type enumeration.
- *
- * Data types in this library are all *logical*. They can be expressed as
- * either a primitive physical type (bytes or bits of some fixed size), a
- * nested type consisting of other data types, or another data type (e.g. a
- * timestamp encoded as an int64).
- *
- * **Note**: Only enum values 0-17 (NONE through Map) are written to an Arrow
- * IPC payload.
- *
- * The rest of the values are specified here so TypeScript can narrow the type
- * signatures further beyond the base Arrow Types. The Arrow DataTypes include
- * metadata like `bitWidth` that impact the type signatures of the values we
- * accept and return.
- *
- * For example, the `Int8Vector` reads 1-byte numbers from an `Int8Array`, an
- * `Int32Vector` reads a 4-byte number from an `Int32Array`, and an `Int64Vector`
- * reads a pair of 4-byte lo, hi 32-bit integers as a zero-copy slice from the
- * underlying `Int32Array`.
- *
- * Library consumers benefit by knowing the narrowest type, since we can ensure
- * the types across all public methods are propagated, and never bail to `any`.
- * These values are _never_ used at runtime, and they will _never_ be written
- * to the flatbuffers metadata of serialized Arrow IPC payloads.
- */
-export enum Type {
-    /** The default placeholder type */
-    NONE            =  0,
-    /** A NULL type having no physical storage */
-    Null            =  1,
-    /** Signed or unsigned 8, 16, 32, or 64-bit little-endian integer */
-    Int             =  2,
-    /** 2, 4, or 8-byte floating point value */
-    Float           =  3,
-    /** Variable-length bytes (no guarantee of UTF8-ness) */
-    Binary          =  4,
-    /** UTF8 variable-length string as List<Char> */
-    Utf8            =  5,
-    /** Boolean as 1 bit, LSB bit-packed ordering */
-    Bool            =  6,
-    /** Precision-and-scale-based decimal type. Storage type depends on the parameters. */
-    Decimal         =  7,
-    /** int32_t days or int64_t milliseconds since the UNIX epoch */
-    Date            =  8,
-    /** Time as signed 32 or 64-bit integer, representing either seconds, milliseconds, microseconds, or nanoseconds since midnight */
-    Time            =  9,
-    /** Exact timestamp encoded with int64 since UNIX epoch (Default unit millisecond) */
-    Timestamp       = 10,
-    /** YEAR_MONTH or DAY_TIME interval in SQL style */
-    Interval        = 11,
-    /** A list of some logical data type */
-    List            = 12,
-    /** Struct of logical types */
-    Struct          = 13,
-    /** Union of logical types */
-    Union           = 14,
-    /** Fixed-size binary. Each value occupies the same number of bytes */
-    FixedSizeBinary = 15,
-    /** Fixed-size list. Each value occupies the same number of bytes */
-    FixedSizeList   = 16,
-    /** Map of named logical types */
-    Map             = 17,
-
-    /** Dictionary aka Category type */
-    Dictionary            = -1,
-    Int8                  = -2,
-    Int16                 = -3,
-    Int32                 = -4,
-    Int64                 = -5,
-    Uint8                 = -6,
-    Uint16                = -7,
-    Uint32                = -8,
-    Uint64                = -9,
-    Float16               = -10,
-    Float32               = -11,
-    Float64               = -12,
-    DateDay               = -13,
-    DateMillisecond       = -14,
-    TimestampSecond       = -15,
-    TimestampMillisecond  = -16,
-    TimestampMicrosecond  = -17,
-    TimestampNanosecond   = -18,
-    TimeSecond            = -19,
-    TimeMillisecond       = -20,
-    TimeMicrosecond       = -21,
-    TimeNanosecond        = -22,
-    DenseUnion            = -23,
-    SparseUnion           = -24,
-    IntervalDayTime       = -25,
-    IntervalYearMonth     = -26,
-}
-
-export enum BufferType {
-    /**
-     * used in List type, Dense Union and variable length primitive types (String, Binary)
-     */
-    OFFSET = 0,
-
-    /**
-     * actual data, either fixed width primitive types in slots or variable width delimited by an OFFSET vector
-     */
-    DATA = 1,
-
-    /**
-     * Bit vector indicating if each value is null
-     */
-    VALIDITY = 2,
-
-    /**
-     * Type vector used in Union type
-     */
-    TYPE = 3
-  }
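
Because Buffers<T> in data.ts is indexed by BufferType, the enum above doubles as a stable buffer layout. A small sketch (the helper name is illustrative):

    import { BufferType, Type } from './enum';
    import { Data } from './data';

    // A generic kernel can address buffers positionally without knowing
    // the concrete vector layout; index 2 is always the validity bitmap.
    function validityOf(data: Data): Uint8Array | undefined {
        return data.buffers[BufferType.VALIDITY];
    }

    // The negative Type values exist only for compile-time narrowing and,
    // as the comment above notes, are never written to IPC metadata.
    const isUnionVariant = (t: Type) =>
        t === Type.Union || t === Type.DenseUnion || t === Type.SparseUnion;
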
diff --git a/js/src/fb/.eslintrc.js b/js/src/fb/.eslintrc.js
deleted file mode 100644
index d448540..0000000
--- a/js/src/fb/.eslintrc.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-module.exports = {
-    rules: {
-        "@typescript-eslint/no-require-imports": "off",
-        "@typescript-eslint/no-inferrable-types": "off"
-    },
-};
\ No newline at end of file
diff --git a/js/src/fb/File.ts b/js/src/fb/File.ts
deleted file mode 100644
index 5746dd1..0000000
--- a/js/src/fb/File.ts
+++ /dev/null
@@ -1,300 +0,0 @@
-// automatically generated by the FlatBuffers compiler, do not modify
-
-import { flatbuffers } from 'flatbuffers';
-import * as NS13596923344997147894 from './Schema';
-/**
- * ----------------------------------------------------------------------
- * Arrow File metadata
- *
- *
- * @constructor
- */
-export class Footer {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Footer
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Footer {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Footer= obj
-     * @returns Footer
-     */
-    static getRootAsFooter(bb: flatbuffers.ByteBuffer, obj?: Footer): Footer {
-        return (obj || new Footer()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Footer= obj
-     * @returns Footer
-     */
-    static getSizePrefixedRootAsFooter(bb: flatbuffers.ByteBuffer, obj?: Footer): Footer {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Footer()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns MetadataVersion
-     */
-    version(): NS13596923344997147894.MetadataVersion {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : NS13596923344997147894.MetadataVersion.V1;
-    }
-
-    /**
-     * @param Schema= obj
-     * @returns Schema|null
-     */
-    schema(obj?: NS13596923344997147894.Schema): NS13596923344997147894.Schema | null {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? (obj || new NS13596923344997147894.Schema()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null;
-    }
-
-    /**
-     * @param number index
-     * @param Block= obj
-     * @returns Block
-     */
-    dictionaries(index: number, obj?: Block): Block | null {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? (obj || new Block()).__init(this.bb!.__vector(this.bb_pos + offset) + index * 24, this.bb!) : null;
-    }
-
-    /**
-     * @returns number
-     */
-    dictionariesLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @param number index
-     * @param Block= obj
-     * @returns Block
-     */
-    recordBatches(index: number, obj?: Block): Block | null {
-        const offset = this.bb!.__offset(this.bb_pos, 10);
-        return offset ? (obj || new Block()).__init(this.bb!.__vector(this.bb_pos + offset) + index * 24, this.bb!) : null;
-    }
-
-    /**
-     * @returns number
-     */
-    recordBatchesLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 10);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * User-defined metadata
-     *
-     * @param number index
-     * @param KeyValue= obj
-     * @returns KeyValue
-     */
-    customMetadata(index: number, obj?: NS13596923344997147894.KeyValue): NS13596923344997147894.KeyValue | null {
-        const offset = this.bb!.__offset(this.bb_pos, 12);
-        return offset ? (obj || new NS13596923344997147894.KeyValue()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null;
-    }
-
-    /**
-     * @returns number
-     */
-    customMetadataLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 12);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startFooter(builder: flatbuffers.Builder) {
-        builder.startObject(5);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param MetadataVersion version
-     */
-    static addVersion(builder: flatbuffers.Builder, version: NS13596923344997147894.MetadataVersion) {
-        builder.addFieldInt16(0, version, NS13596923344997147894.MetadataVersion.V1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset schemaOffset
-     */
-    static addSchema(builder: flatbuffers.Builder, schemaOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(1, schemaOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset dictionariesOffset
-     */
-    static addDictionaries(builder: flatbuffers.Builder, dictionariesOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(2, dictionariesOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startDictionariesVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(24, numElems, 8);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset recordBatchesOffset
-     */
-    static addRecordBatches(builder: flatbuffers.Builder, recordBatchesOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(3, recordBatchesOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startRecordBatchesVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(24, numElems, 8);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset customMetadataOffset
-     */
-    static addCustomMetadata(builder: flatbuffers.Builder, customMetadataOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(4, customMetadataOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param Array.<flatbuffers.Offset> data
-     * @returns flatbuffers.Offset
-     */
-    static createCustomMetadataVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
-        builder.startVector(4, data.length, 4);
-        for (let i = data.length - 1; i >= 0; i--) {
-            builder.addOffset(data[i]);
-        }
-        return builder.endVector();
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startCustomMetadataVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(4, numElems, 4);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endFooter(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset offset
-     */
-    static finishFooterBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
-        builder.finish(offset);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset offset
-     */
-    static finishSizePrefixedFooterBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
-        builder.finish(offset, undefined, true);
-    }
-
-    static createFooter(builder: flatbuffers.Builder, version: NS13596923344997147894.MetadataVersion, schemaOffset: flatbuffers.Offset, dictionariesOffset: flatbuffers.Offset, recordBatchesOffset: flatbuffers.Offset, customMetadataOffset: flatbuffers.Offset): flatbuffers.Offset {
-        Footer.startFooter(builder);
-        Footer.addVersion(builder, version);
-        Footer.addSchema(builder, schemaOffset);
-        Footer.addDictionaries(builder, dictionariesOffset);
-        Footer.addRecordBatches(builder, recordBatchesOffset);
-        Footer.addCustomMetadata(builder, customMetadataOffset);
-        return Footer.endFooter(builder);
-    }
-}
-/**
- * @constructor
- */
-export class Block {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Block
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Block {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * Index to the start of the RecordBlock (note this is past the Message header)
-     *
-     * @returns flatbuffers.Long
-     */
-    offset(): flatbuffers.Long {
-        return this.bb!.readInt64(this.bb_pos);
-    }
-
-    /**
-     * Length of the metadata
-     *
-     * @returns number
-     */
-    metaDataLength(): number {
-        return this.bb!.readInt32(this.bb_pos + 8);
-    }
-
-    /**
-     * Length of the data (this is aligned so there can be a gap between this and
-     * the metadata).
-     *
-     * @returns flatbuffers.Long
-     */
-    bodyLength(): flatbuffers.Long {
-        return this.bb!.readInt64(this.bb_pos + 16);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Long offset
-     * @param number metaDataLength
-     * @param flatbuffers.Long bodyLength
-     * @returns flatbuffers.Offset
-     */
-    static createBlock(builder: flatbuffers.Builder, offset: flatbuffers.Long, metaDataLength: number, bodyLength: flatbuffers.Long): flatbuffers.Offset {
-        builder.prep(8, 24);
-        builder.writeInt64(bodyLength);
-        builder.pad(4);
-        builder.writeInt32(metaDataLength);
-        builder.writeInt64(offset);
-        return builder.offset();
-    }
-
-}
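
The Footer above records where each dictionary and record batch lives in the file, and Block is the 24-byte (offset, metaDataLength, bodyLength) struct it points at. A rough sketch of how these removed helpers were driven (illustrative only; the schemaOffset and customMetadataOffset are left as placeholder zeros rather than built with the Schema.ts helpers):

    import { flatbuffers } from 'flatbuffers';
    // Footer and Block come from the File.ts shown above (removed by this commit).
    import { Footer, Block } from './File';
    import * as Schema_ from './Schema';

    const builder = new flatbuffers.Builder(1024);

    // Block is a fixed-size struct, so entries are written inline while the
    // vector is open, and in reverse order.
    Footer.startRecordBatchesVector(builder, 1);
    Block.createBlock(
        builder,
        builder.createLong(8, 0),    // offset into the file (low, high)
        256,                         // metaDataLength
        builder.createLong(1024, 0)  // bodyLength
    );
    const recordBatches = builder.endVector();

    const footer = Footer.createFooter(
        builder,
        Schema_.MetadataVersion.V4,
        0,             // schemaOffset: placeholder for this sketch
        0,             // dictionariesOffset: none
        recordBatches,
        0              // customMetadataOffset: none
    );
    Footer.finishFooterBuffer(builder, footer);
    const footerBytes = builder.asUint8Array();
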
diff --git a/js/src/fb/Message.ts b/js/src/fb/Message.ts
deleted file mode 100644
index 973eb04..0000000
--- a/js/src/fb/Message.ts
+++ /dev/null
@@ -1,709 +0,0 @@
-// automatically generated by the FlatBuffers compiler, do not modify
-
-import { flatbuffers } from 'flatbuffers';
-import * as NS13596923344997147894 from './Schema';
-/**
- * @enum {number}
- */
-export enum CompressionType {
-    LZ4_FRAME = 0,
-    ZSTD = 1
-}
-
-/**
- * Provided for forward compatibility in case we need to support different
- * strategies for compressing the IPC message body (like whole-body
- * compression rather than buffer-level) in the future
- *
- * @enum {number}
- */
-export enum BodyCompressionMethod {
-    /**
-     * Each constituent buffer is first compressed with the indicated
-     * compressor, and then written with the uncompressed length in the first 8
-     * bytes as a 64-bit little-endian signed integer followed by the compressed
-     * buffer bytes (and then padding as required by the protocol). The
-     * uncompressed length may be set to -1 to indicate that the data that
-     * follows is not compressed, which can be useful for cases where
-     * compression does not yield appreciable savings.
-     */
-    BUFFER = 0
-}
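
A minimal sketch of decoding one constituent buffer under the BUFFER framing described above (illustrative only; `decompress` stands in for an application-supplied LZ4-frame or ZSTD codec):

    // One buffer = 8-byte little-endian int64 uncompressed length, then the
    // compressed bytes; a length of -1 means the payload is not compressed.
    function decodeBodyBuffer(
        body: Uint8Array,
        decompress: (compressed: Uint8Array) => Uint8Array
    ): Uint8Array {
        const view = new DataView(body.buffer, body.byteOffset, body.byteLength);
        const lenLow = view.getInt32(0, true);
        const lenHigh = view.getInt32(4, true);
        const payload = body.subarray(8);
        if (lenLow === -1 && lenHigh === -1) {
            return payload; // stored uncompressed: compression saved nothing
        }
        return decompress(payload);
    }
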
-
-/**
- * ----------------------------------------------------------------------
- * The root Message type
- * This union enables us to easily send different message types without
- * redundant storage, and in the future we can easily add new message types.
- *
- * Arrow implementations do not need to implement all of the message types,
- * which may include experimental metadata types. For maximum compatibility,
- * it is best to send data using RecordBatch
- *
- * @enum {number}
- */
-export enum MessageHeader {
-    NONE = 0,
-    Schema = 1,
-    DictionaryBatch = 2,
-    RecordBatch = 3,
-    Tensor = 4,
-    SparseTensor = 5
-}
-
-/**
- * ----------------------------------------------------------------------
- * Data structures for describing a table row batch (a collection of
- * equal-length Arrow arrays).
- *
- * Metadata about a field at some level of a nested type tree (but not
- * its children).
- *
- * For example, a List<Int16> with values [[1, 2, 3], null, [4], [5, 6], null]
- * would have {length: 5, null_count: 2} for its List node, and {length: 6,
- * null_count: 0} for its Int16 node, as separate FieldNode structs
- *
- * @constructor
- */
-export class FieldNode {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns FieldNode
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): FieldNode {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * The number of value slots in the Arrow array at this level of a nested
-     * tree
-     *
-     * @returns flatbuffers.Long
-     */
-    length(): flatbuffers.Long {
-        return this.bb!.readInt64(this.bb_pos);
-    }
-
-    /**
-     * The number of observed nulls. Fields with null_count == 0 may choose not
-     * to write their physical validity bitmap out as a materialized buffer,
-     * instead setting the length of the bitmap buffer to 0.
-     *
-     * @returns flatbuffers.Long
-     */
-    nullCount(): flatbuffers.Long {
-        return this.bb!.readInt64(this.bb_pos + 8);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Long length
-     * @param flatbuffers.Long null_count
-     * @returns flatbuffers.Offset
-     */
-    static createFieldNode(builder: flatbuffers.Builder, length: flatbuffers.Long, null_count: flatbuffers.Long): flatbuffers.Offset {
-        builder.prep(8, 16);
-        builder.writeInt64(null_count);
-        builder.writeInt64(length);
-        return builder.offset();
-    }
-
-}
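
To make the List<Int16> example above concrete, here is an illustrative sketch of writing those two FieldNode structs into a nodes vector. Structs are written inline in reverse order, so the Int16 child goes in first and the List parent last, matching the pre-order flattening:

    import { flatbuffers } from 'flatbuffers';
    import { FieldNode } from './Message';

    const builder = new flatbuffers.Builder();

    // Equivalent to RecordBatch.startNodesVector(builder, 2):
    // 16-byte structs with 8-byte alignment.
    builder.startVector(16, 2, 8);
    FieldNode.createFieldNode(builder, builder.createLong(6, 0), builder.createLong(0, 0)); // Int16: {length: 6, null_count: 0}
    FieldNode.createFieldNode(builder, builder.createLong(5, 0), builder.createLong(2, 0)); // List:  {length: 5, null_count: 2}
    const nodes = builder.endVector();
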
-/**
- * Optional compression for the memory buffers constituting IPC message
- * bodies. Intended for use with RecordBatch but could be used for other
- * message types
- *
- * @constructor
- */
-export class BodyCompression {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns BodyCompression
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): BodyCompression {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param BodyCompression= obj
-     * @returns BodyCompression
-     */
-    static getRootAsBodyCompression(bb: flatbuffers.ByteBuffer, obj?: BodyCompression): BodyCompression {
-        return (obj || new BodyCompression()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param BodyCompression= obj
-     * @returns BodyCompression
-     */
-    static getSizePrefixedRootAsBodyCompression(bb: flatbuffers.ByteBuffer, obj?: BodyCompression): BodyCompression {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new BodyCompression()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * Compressor library
-     *
-     * @returns CompressionType
-     */
-    codec(): CompressionType {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt8(this.bb_pos + offset)) : CompressionType.LZ4_FRAME;
-    }
-
-    /**
-     * Indicates the way the record batch body was compressed
-     *
-     * @returns BodyCompressionMethod
-     */
-    method(): BodyCompressionMethod {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? /**  */ (this.bb!.readInt8(this.bb_pos + offset)) : BodyCompressionMethod.BUFFER;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startBodyCompression(builder: flatbuffers.Builder) {
-        builder.startObject(2);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param CompressionType codec
-     */
-    static addCodec(builder: flatbuffers.Builder, codec: CompressionType) {
-        builder.addFieldInt8(0, codec, CompressionType.LZ4_FRAME);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param BodyCompressionMethod method
-     */
-    static addMethod(builder: flatbuffers.Builder, method: BodyCompressionMethod) {
-        builder.addFieldInt8(1, method, BodyCompressionMethod.BUFFER);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endBodyCompression(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createBodyCompression(builder: flatbuffers.Builder, codec: CompressionType, method: BodyCompressionMethod): flatbuffers.Offset {
-        BodyCompression.startBodyCompression(builder);
-        BodyCompression.addCodec(builder, codec);
-        BodyCompression.addMethod(builder, method);
-        return BodyCompression.endBodyCompression(builder);
-    }
-}
-/**
- * A data header describing the shared memory layout of a "record" or "row"
- * batch. Some systems call this a "row batch" internally and others a "record
- * batch".
- *
- * @constructor
- */
-export class RecordBatch {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns RecordBatch
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): RecordBatch {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param RecordBatch= obj
-     * @returns RecordBatch
-     */
-    static getRootAsRecordBatch(bb: flatbuffers.ByteBuffer, obj?: RecordBatch): RecordBatch {
-        return (obj || new RecordBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param RecordBatch= obj
-     * @returns RecordBatch
-     */
-    static getSizePrefixedRootAsRecordBatch(bb: flatbuffers.ByteBuffer, obj?: RecordBatch): RecordBatch {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new RecordBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * number of records / rows. The arrays in the batch should all have this
-     * length
-     *
-     * @returns flatbuffers.Long
-     */
-    length(): flatbuffers.Long {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);
-    }
-
-    /**
-     * Nodes correspond to the pre-ordered flattened logical schema
-     *
-     * @param number index
-     * @param FieldNode= obj
-     * @returns FieldNode
-     */
-    nodes(index: number, obj?: FieldNode): FieldNode | null {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? (obj || new FieldNode()).__init(this.bb!.__vector(this.bb_pos + offset) + index * 16, this.bb!) : null;
-    }
-
-    /**
-     * @returns number
-     */
-    nodesLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * Buffers correspond to the pre-ordered flattened buffer tree
-     *
-     * The number of buffers appended to this list depends on the schema. For
-     * example, most primitive arrays will have 2 buffers, 1 for the validity
-     * bitmap and 1 for the values. For struct arrays, there will only be a
-     * single buffer for the validity (nulls) bitmap
-     *
-     * @param number index
-     * @param Buffer= obj
-     * @returns Buffer
-     */
-    buffers(index: number, obj?: NS13596923344997147894.Buffer): NS13596923344997147894.Buffer | null {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? (obj || new NS13596923344997147894.Buffer()).__init(this.bb!.__vector(this.bb_pos + offset) + index * 16, this.bb!) : null;
-    }
-
-    /**
-     * @returns number
-     */
-    buffersLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * Optional compression of the message body
-     *
-     * @param BodyCompression= obj
-     * @returns BodyCompression|null
-     */
-    compression(obj?: BodyCompression): BodyCompression | null {
-        const offset = this.bb!.__offset(this.bb_pos, 10);
-        return offset ? (obj || new BodyCompression()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startRecordBatch(builder: flatbuffers.Builder) {
-        builder.startObject(4);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Long length
-     */
-    static addLength(builder: flatbuffers.Builder, length: flatbuffers.Long) {
-        builder.addFieldInt64(0, length, builder.createLong(0, 0));
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset nodesOffset
-     */
-    static addNodes(builder: flatbuffers.Builder, nodesOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(1, nodesOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startNodesVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(16, numElems, 8);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset buffersOffset
-     */
-    static addBuffers(builder: flatbuffers.Builder, buffersOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(2, buffersOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startBuffersVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(16, numElems, 8);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset compressionOffset
-     */
-    static addCompression(builder: flatbuffers.Builder, compressionOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(3, compressionOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endRecordBatch(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createRecordBatch(builder: flatbuffers.Builder, length: flatbuffers.Long, nodesOffset: flatbuffers.Offset, buffersOffset: flatbuffers.Offset, compressionOffset: flatbuffers.Offset): flatbuffers.Offset {
-        RecordBatch.startRecordBatch(builder);
-        RecordBatch.addLength(builder, length);
-        RecordBatch.addNodes(builder, nodesOffset);
-        RecordBatch.addBuffers(builder, buffersOffset);
-        RecordBatch.addCompression(builder, compressionOffset);
-        return RecordBatch.endRecordBatch(builder);
-    }
-}
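
The buffer-count rule in the `buffers` comment above pairs with the body layout: each buffer is recorded as an (offset, length) within the message body, with padding between buffers. An illustrative sketch of that bookkeeping (not from the removed sources):

    interface BufferSpec { offset: number; length: number; }

    // Lay out buffers back to back, each starting at the next 8-byte boundary,
    // matching the padding between buffers in an IPC message body.
    function layoutBuffers(lengths: number[]): BufferSpec[] {
        const specs: BufferSpec[] = [];
        let offset = 0;
        for (const length of lengths) {
            specs.push({ offset, length });
            offset += (length + 7) & ~7; // round up to a multiple of 8
        }
        return specs;
    }

    // An Int32 array of length 5 with nulls: 1-byte validity bitmap + 20 bytes
    // of values, i.e. the "2 buffers" case from the comment above.
    layoutBuffers([1, 20]); // [{offset: 0, length: 1}, {offset: 8, length: 20}]
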
-/**
- * For sending dictionary encoding information. Any Field can be
- * dictionary-encoded, but in this case none of its children may be
- * dictionary-encoded.
- * There is one vector / column per dictionary, but that vector / column
- * may be spread across multiple dictionary batches by using the isDelta
- * flag
- *
- * @constructor
- */
-export class DictionaryBatch {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns DictionaryBatch
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): DictionaryBatch {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param DictionaryBatch= obj
-     * @returns DictionaryBatch
-     */
-    static getRootAsDictionaryBatch(bb: flatbuffers.ByteBuffer, obj?: DictionaryBatch): DictionaryBatch {
-        return (obj || new DictionaryBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param DictionaryBatch= obj
-     * @returns DictionaryBatch
-     */
-    static getSizePrefixedRootAsDictionaryBatch(bb: flatbuffers.ByteBuffer, obj?: DictionaryBatch): DictionaryBatch {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new DictionaryBatch()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns flatbuffers.Long
-     */
-    id(): flatbuffers.Long {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);
-    }
-
-    /**
-     * @param RecordBatch= obj
-     * @returns RecordBatch|null
-     */
-    data(obj?: RecordBatch): RecordBatch | null {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? (obj || new RecordBatch()).__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) : null;
-    }
-
-    /**
-     * If isDelta is true the values in the dictionary are to be appended to a
-     * dictionary with the indicated id. If isDelta is false this dictionary
-     * should replace the existing dictionary.
-     *
-     * @returns boolean
-     */
-    isDelta(): boolean {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? !!this.bb!.readInt8(this.bb_pos + offset) : false;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startDictionaryBatch(builder: flatbuffers.Builder) {
-        builder.startObject(3);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Long id
-     */
-    static addId(builder: flatbuffers.Builder, id: flatbuffers.Long) {
-        builder.addFieldInt64(0, id, builder.createLong(0, 0));
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset dataOffset
-     */
-    static addData(builder: flatbuffers.Builder, dataOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(1, dataOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param boolean isDelta
-     */
-    static addIsDelta(builder: flatbuffers.Builder, isDelta: boolean) {
-        builder.addFieldInt8(2, +isDelta, +false);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endDictionaryBatch(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createDictionaryBatch(builder: flatbuffers.Builder, id: flatbuffers.Long, dataOffset: flatbuffers.Offset, isDelta: boolean): flatbuffers.Offset {
-        DictionaryBatch.startDictionaryBatch(builder);
-        DictionaryBatch.addId(builder, id);
-        DictionaryBatch.addData(builder, dataOffset);
-        DictionaryBatch.addIsDelta(builder, isDelta);
-        return DictionaryBatch.endDictionaryBatch(builder);
-    }
-}
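
An illustrative sketch of the isDelta rule described above; the generated API exposes ids as flatbuffers.Long, but plain strings keep the bookkeeping simple here:

    type DictValue = unknown;
    const dictionaries: Record<string, DictValue[]> = {};

    function applyDictionaryBatch(id: string, values: DictValue[], isDelta: boolean): void {
        if (isDelta) {
            // Delta batches append to the dictionary registered under this id.
            dictionaries[id] = (dictionaries[id] ?? []).concat(values);
        } else {
            // Non-delta batches replace any existing dictionary wholesale.
            dictionaries[id] = values;
        }
    }
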
-/**
- * @constructor
- */
-export class Message {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Message
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Message {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Message= obj
-     * @returns Message
-     */
-    static getRootAsMessage(bb: flatbuffers.ByteBuffer, obj?: Message): Message {
-        return (obj || new Message()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Message= obj
-     * @returns Message
-     */
-    static getSizePrefixedRootAsMessage(bb: flatbuffers.ByteBuffer, obj?: Message): Message {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Message()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns MetadataVersion
-     */
-    version(): NS13596923344997147894.MetadataVersion {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : NS13596923344997147894.MetadataVersion.V1;
-    }
-
-    /**
-     * @returns MessageHeader
-     */
-    headerType(): MessageHeader {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? /**  */ (this.bb!.readUint8(this.bb_pos + offset)) : MessageHeader.NONE;
-    }
-
-    /**
-     * @param flatbuffers.Table obj
-     * @returns ?flatbuffers.Table
-     */
-    header<T extends flatbuffers.Table>(obj: T): T | null {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? this.bb!.__union(obj, this.bb_pos + offset) : null;
-    }
-
-    /**
-     * @returns flatbuffers.Long
-     */
-    bodyLength(): flatbuffers.Long {
-        const offset = this.bb!.__offset(this.bb_pos, 10);
-        return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);
-    }
-
-    /**
-     * @param number index
-     * @param KeyValue= obj
-     * @returns KeyValue
-     */
-    customMetadata(index: number, obj?: NS13596923344997147894.KeyValue): NS13596923344997147894.KeyValue | null {
-        const offset = this.bb!.__offset(this.bb_pos, 12);
-        return offset ? (obj || new NS13596923344997147894.KeyValue()).__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) : null;
-    }
-
-    /**
-     * @returns number
-     */
-    customMetadataLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 12);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startMessage(builder: flatbuffers.Builder) {
-        builder.startObject(5);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param MetadataVersion version
-     */
-    static addVersion(builder: flatbuffers.Builder, version: NS13596923344997147894.MetadataVersion) {
-        builder.addFieldInt16(0, version, NS13596923344997147894.MetadataVersion.V1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param MessageHeader headerType
-     */
-    static addHeaderType(builder: flatbuffers.Builder, headerType: MessageHeader) {
-        builder.addFieldInt8(1, headerType, MessageHeader.NONE);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset headerOffset
-     */
-    static addHeader(builder: flatbuffers.Builder, headerOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(2, headerOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Long bodyLength
-     */
-    static addBodyLength(builder: flatbuffers.Builder, bodyLength: flatbuffers.Long) {
-        builder.addFieldInt64(3, bodyLength, builder.createLong(0, 0));
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset customMetadataOffset
-     */
-    static addCustomMetadata(builder: flatbuffers.Builder, customMetadataOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(4, customMetadataOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param Array.<flatbuffers.Offset> data
-     * @returns flatbuffers.Offset
-     */
-    static createCustomMetadataVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
-        builder.startVector(4, data.length, 4);
-        for (let i = data.length - 1; i >= 0; i--) {
-            builder.addOffset(data[i]);
-        }
-        return builder.endVector();
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startCustomMetadataVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(4, numElems, 4);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endMessage(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset offset
-     */
-    static finishMessageBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
-        builder.finish(offset);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset offset
-     */
-    static finishSizePrefixedMessageBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
-        builder.finish(offset, undefined, true);
-    }
-
-    static createMessage(builder: flatbuffers.Builder, version: NS13596923344997147894.MetadataVersion, headerType: MessageHeader, headerOffset: flatbuffers.Offset, bodyLength: flatbuffers.Long, customMetadataOffset: flatbuffers.Offset): flatbuffers.Offset {
-        Message.startMessage(builder);
-        Message.addVersion(builder, version);
-        Message.addHeaderType(builder, headerType);
-        Message.addHeader(builder, headerOffset);
-        Message.addBodyLength(builder, bodyLength);
-        Message.addCustomMetadata(builder, customMetadataOffset);
-        return Message.endMessage(builder);
-    }
-}
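
Putting the Message accessors above together, a reader resolves the header union by first checking headerType() and then passing the matching table into header(). A rough sketch, assuming `bytes` holds one finished Message flatbuffer with the IPC length/continuation framing already stripped:

    import { flatbuffers } from 'flatbuffers';
    import { Message, MessageHeader, RecordBatch, DictionaryBatch } from './Message';

    function describeMessage(bytes: Uint8Array): void {
        const bb = new flatbuffers.ByteBuffer(bytes);
        const message = Message.getRootAsMessage(bb);
        switch (message.headerType()) {
            case MessageHeader.RecordBatch: {
                // header() resolves the union into the table instance passed in.
                const batch = message.header(new RecordBatch())!;
                console.log('record batch rows:', batch.length().toFloat64());
                break;
            }
            case MessageHeader.DictionaryBatch: {
                const dict = message.header(new DictionaryBatch())!;
                console.log('dictionary', dict.id().toFloat64(), 'isDelta:', dict.isDelta());
                break;
            }
            default:
                console.log('unhandled header type:', message.headerType());
        }
    }
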
diff --git a/js/src/fb/Schema.ts b/js/src/fb/Schema.ts
deleted file mode 100644
index f675bc2..0000000
--- a/js/src/fb/Schema.ts
+++ /dev/null
@@ -1,2658 +0,0 @@
-// automatically generated by the FlatBuffers compiler, do not modify
-
-import { flatbuffers } from 'flatbuffers';
-/**
- * Logical types, vector layouts, and schemas
- *
- * @enum {number}
- */
-export enum MetadataVersion {
-    /**
-     * 0.1.0 (October 2016).
-     */
-    V1 = 0,
-
-    /**
-     * 0.2.0 (February 2017). Non-backwards compatible with V1.
-     */
-    V2 = 1,
-
-    /**
-     * 0.3.0 -> 0.7.1 (May - December 2017). Non-backwards compatible with V2.
-     */
-    V3 = 2,
-
-    /**
-     * >= 0.8.0 (December 2017). Non-backwards compatible with V3.
-     */
-    V4 = 3,
-
-    /**
-     * >= 1.0.0 (July 2020). Backwards compatible with V4 (V5 readers can read V4
-     * metadata and IPC messages). Implementations are recommended to provide a
-     * V4 compatibility mode with V5 format changes disabled.
-     *
-     * Incompatible changes between V4 and V5:
-     * - Union buffer layout has changed. In V5, Unions don't have a validity
-     *   bitmap buffer.
-     */
-    V5 = 4
-}
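
The one V4-to-V5 break called out above is that unions lose their validity bitmap buffer. A small illustrative sketch of what that means for a reader walking the buffers vector (the counts below follow the note above, not the removed sources):

    import { MetadataVersion } from './Schema';

    // Buffers expected per union column: a types buffer, plus an offsets
    // buffer when dense, plus (before V5 only) a validity bitmap.
    function unionBufferCount(version: MetadataVersion, isDense: boolean): number {
        const base = isDense ? 2 : 1;
        return version >= MetadataVersion.V5 ? base : base + 1;
    }

    unionBufferCount(MetadataVersion.V4, false); // 2 (validity + types)
    unionBufferCount(MetadataVersion.V5, false); // 1 (types only)
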
-
-/**
- * Represents Arrow Features that might not have full support
- * within implementations. This is intended to be used in
- * two scenarios:
- *  1.  A mechanism for readers of Arrow Streams
- *      and files to understand that the stream or file makes
- *      use of a feature that isn't supported by or is unknown to
- *      the implementation (and therefore can meet the Arrow
- *      forward compatibility guarantees).
- *  2.  A means of negotiating between a client and server
- *      what features a stream is allowed to use. The enum
- *      values here are intended to represent higher-level
- *      features; additional details may be negotiated
- *      with key-value pairs specific to the protocol.
- *
- * Enums added to this list should be assigned power-of-two values
- * to facilitate exchanging and comparing bitmaps for supported
- * features.
- *
- * @enum {number}
- */
-export enum Feature {
-    /**
-     * Needed to make flatbuffers happy.
-     */
-    UNUSED = 0,
-
-    /**
-     * The stream makes use of multiple full dictionaries with the
-     * same ID and assumes clients implement dictionary replacement
-     * correctly.
-     */
-    DICTIONARY_REPLACEMENT = 1,
-
-    /**
-     * The stream makes use of compressed bodies as described
-     * in Message.fbs.
-     */
-    COMPRESSED_BODY = 2
-}
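
Because the values are powers of two, a feature set collapses to a single bitmap and negotiation is plain bitwise arithmetic; an illustrative sketch:

    import { Feature } from './Schema';

    // Feature values are powers of two, so a set is the bitwise OR of its members.
    function featureBitmap(features: Feature[]): number {
        return features.reduce((bits, f) => bits | f, 0);
    }

    // A client can check required features against what a stream offers.
    function supportsAll(offered: number, required: number): boolean {
        return (offered & required) === required;
    }

    const offered = featureBitmap([Feature.DICTIONARY_REPLACEMENT, Feature.COMPRESSED_BODY]); // 3
    supportsAll(offered, Feature.COMPRESSED_BODY); // true
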
-
-/**
- * @enum {number}
- */
-export enum UnionMode {
-    Sparse = 0,
-    Dense = 1
-}
-
-/**
- * @enum {number}
- */
-export enum Precision {
-    HALF = 0,
-    SINGLE = 1,
-    DOUBLE = 2
-}
-
-/**
- * @enum {number}
- */
-export enum DateUnit {
-    DAY = 0,
-    MILLISECOND = 1
-}
-
-/**
- * @enum {number}
- */
-export enum TimeUnit {
-    SECOND = 0,
-    MILLISECOND = 1,
-    MICROSECOND = 2,
-    NANOSECOND = 3
-}
-
-/**
- * @enum {number}
- */
-export enum IntervalUnit {
-    YEAR_MONTH = 0,
-    DAY_TIME = 1
-}
-
-/**
- * ----------------------------------------------------------------------
- * Top-level Type value, enabling extensible type-specific metadata. We can
- * add new logical types to Type without breaking backwards compatibility
- *
- * @enum {number}
- */
-export enum Type {
-    NONE = 0,
-    Null = 1,
-    Int = 2,
-    FloatingPoint = 3,
-    Binary = 4,
-    Utf8 = 5,
-    Bool = 6,
-    Decimal = 7,
-    Date = 8,
-    Time = 9,
-    Timestamp = 10,
-    Interval = 11,
-    List = 12,
-    Struct_ = 13,
-    Union = 14,
-    FixedSizeBinary = 15,
-    FixedSizeList = 16,
-    Map = 17,
-    Duration = 18,
-    LargeBinary = 19,
-    LargeUtf8 = 20,
-    LargeList = 21
-}
-
-/**
- * ----------------------------------------------------------------------
- * Dictionary encoding metadata.
- * Maintained for forwards compatibility; in the future,
- * dictionaries might be explicit maps between integers and values,
- * allowing for non-contiguous index values.
- *
- * @enum {number}
- */
-export enum DictionaryKind {
-    DenseArray = 0
-}
-
-/**
- * ----------------------------------------------------------------------
- * Endianness of the platform producing the data
- *
- * @enum {number}
- */
-export enum Endianness {
-    Little = 0,
-    Big = 1
-}
-
-/**
- * These are stored in the flatbuffer in the Type union below
- *
- * @constructor
- */
-export class Null {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Null
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Null {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Null= obj
-     * @returns Null
-     */
-    static getRootAsNull(bb: flatbuffers.ByteBuffer, obj?: Null): Null {
-        return (obj || new Null()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Null= obj
-     * @returns Null
-     */
-    static getSizePrefixedRootAsNull(bb: flatbuffers.ByteBuffer, obj?: Null): Null {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Null()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startNull(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endNull(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createNull(builder: flatbuffers.Builder): flatbuffers.Offset {
-        Null.startNull(builder);
-        return Null.endNull(builder);
-    }
-}
-/**
- * A Struct_ in the flatbuffer metadata is the same as an Arrow Struct
- * (according to the physical memory layout). We used Struct_ here as
- * Struct is a reserved word in Flatbuffers
- *
- * @constructor
- */
-export class Struct_ {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Struct_
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Struct_ {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Struct_= obj
-     * @returns Struct_
-     */
-    static getRootAsStruct_(bb: flatbuffers.ByteBuffer, obj?: Struct_): Struct_ {
-        return (obj || new Struct_()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Struct_= obj
-     * @returns Struct_
-     */
-    static getSizePrefixedRootAsStruct_(bb: flatbuffers.ByteBuffer, obj?: Struct_): Struct_ {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Struct_()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startStruct_(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endStruct_(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createStruct_(builder: flatbuffers.Builder): flatbuffers.Offset {
-        Struct_.startStruct_(builder);
-        return Struct_.endStruct_(builder);
-    }
-}
-/**
- * @constructor
- */
-export class List {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns List
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): List {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param List= obj
-     * @returns List
-     */
-    static getRootAsList(bb: flatbuffers.ByteBuffer, obj?: List): List {
-        return (obj || new List()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param List= obj
-     * @returns List
-     */
-    static getSizePrefixedRootAsList(bb: flatbuffers.ByteBuffer, obj?: List): List {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new List()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startList(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endList(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createList(builder: flatbuffers.Builder): flatbuffers.Offset {
-        List.startList(builder);
-        return List.endList(builder);
-    }
-}
-/**
- * Same as List, but with 64-bit offsets, allowing representation of
- * extremely large data values.
- *
- * @constructor
- */
-export class LargeList {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns LargeList
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): LargeList {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param LargeList= obj
-     * @returns LargeList
-     */
-    static getRootAsLargeList(bb: flatbuffers.ByteBuffer, obj?: LargeList): LargeList {
-        return (obj || new LargeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param LargeList= obj
-     * @returns LargeList
-     */
-    static getSizePrefixedRootAsLargeList(bb: flatbuffers.ByteBuffer, obj?: LargeList): LargeList {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new LargeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startLargeList(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endLargeList(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createLargeList(builder: flatbuffers.Builder): flatbuffers.Offset {
-        LargeList.startLargeList(builder);
-        return LargeList.endLargeList(builder);
-    }
-}
-/**
- * @constructor
- */
-export class FixedSizeList {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns FixedSizeList
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): FixedSizeList {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param FixedSizeList= obj
-     * @returns FixedSizeList
-     */
-    static getRootAsFixedSizeList(bb: flatbuffers.ByteBuffer, obj?: FixedSizeList): FixedSizeList {
-        return (obj || new FixedSizeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param FixedSizeList= obj
-     * @returns FixedSizeList
-     */
-    static getSizePrefixedRootAsFixedSizeList(bb: flatbuffers.ByteBuffer, obj?: FixedSizeList): FixedSizeList {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new FixedSizeList()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * Number of list items per value
-     *
-     * @returns number
-     */
-    listSize(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startFixedSizeList(builder: flatbuffers.Builder) {
-        builder.startObject(1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number listSize
-     */
-    static addListSize(builder: flatbuffers.Builder, listSize: number) {
-        builder.addFieldInt32(0, listSize, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endFixedSizeList(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createFixedSizeList(builder: flatbuffers.Builder, listSize: number): flatbuffers.Offset {
-        FixedSizeList.startFixedSizeList(builder);
-        FixedSizeList.addListSize(builder, listSize);
-        return FixedSizeList.endFixedSizeList(builder);
-    }
-}
-/**
- * A Map is a logical nested type that is represented as
- *
- * List<entries: Struct<key: K, value: V>>
- *
- * In this layout, the keys and values are each respectively contiguous. We do
- * not constrain the key and value types, so the application is responsible
- * for ensuring that the keys are hashable and unique. Whether the keys are sorted
- * may be set in the metadata for this field.
- *
- * In a field with Map type, the field has a child Struct field, which then
- * has two children: the first the key type and the second the value type.
- * The names of the child fields may be, respectively, "entries", "key",
- * and "value", but this is not enforced.
- *
- * Map
- *   - child[0] entries: Struct
- *     - child[0] key: K
- *     - child[1] value: V
- *
- * Neither the "entries" field nor the "key" field may be nullable.
- *
- * The metadata is structured so that Arrow systems without special handling
- * for Map can make Map an alias for List. The "layout" attribute for the Map
- * field must have the same contents as a List.
- *
- * @constructor
- */
-export class Map {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Map
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Map {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Map= obj
-     * @returns Map
-     */
-    static getRootAsMap(bb: flatbuffers.ByteBuffer, obj?: Map): Map {
-        return (obj || new Map()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Map= obj
-     * @returns Map
-     */
-    static getSizePrefixedRootAsMap(bb: flatbuffers.ByteBuffer, obj?: Map): Map {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Map()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * Set to true if the keys within each value are sorted
-     *
-     * @returns boolean
-     */
-    keysSorted(): boolean {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? !!this.bb!.readInt8(this.bb_pos + offset) : false;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startMap(builder: flatbuffers.Builder) {
-        builder.startObject(1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param boolean keysSorted
-     */
-    static addKeysSorted(builder: flatbuffers.Builder, keysSorted: boolean) {
-        builder.addFieldInt8(0, +keysSorted, +false);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endMap(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createMap(builder: flatbuffers.Builder, keysSorted: boolean): flatbuffers.Offset {
-        Map.startMap(builder);
-        Map.addKeysSorted(builder, keysSorted);
-        return Map.endMap(builder);
-    }
-}
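
An illustrative sketch of the layout described above, reading one map value as a slice of the contiguous entries arrays (plain data, no flatbuffers involved):

    interface MapColumn<K, V> {
        offsets: Int32Array; // list offsets, length = rowCount + 1
        keys: K[];           // contiguous keys of the entries struct
        values: V[];         // contiguous values of the entries struct
    }

    function mapValue<K, V>(col: MapColumn<K, V>, row: number): Array<[K, V]> {
        const out: Array<[K, V]> = [];
        // One map cell is the half-open range [offsets[row], offsets[row + 1])
        // over the shared entries arrays.
        for (let i = col.offsets[row]; i < col.offsets[row + 1]; i++) {
            out.push([col.keys[i], col.values[i]]);
        }
        return out;
    }
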
-/**
- * A union is a complex type with children in Field.
- * By default, ids in the type vector refer to the offsets in the children;
- * optionally, typeIds provides an indirection between the child offset and
- * the type id. For each child, typeIds[offset] is the id used in the type
- * vector.
- *
- * @constructor
- */
-export class Union {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Union
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Union {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Union= obj
-     * @returns Union
-     */
-    static getRootAsUnion(bb: flatbuffers.ByteBuffer, obj?: Union): Union {
-        return (obj || new Union()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Union= obj
-     * @returns Union
-     */
-    static getSizePrefixedRootAsUnion(bb: flatbuffers.ByteBuffer, obj?: Union): Union {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Union()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns UnionMode
-     */
-    mode(): UnionMode {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : UnionMode.Sparse;
-    }
-
-    /**
-     * @param number index
-     * @returns number
-     */
-    typeIds(index: number): number | null {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? this.bb!.readInt32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;
-    }
-
-    /**
-     * @returns number
-     */
-    typeIdsLength(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @returns Int32Array
-     */
-    typeIdsArray(): Int32Array | null {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? new Int32Array(this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset), this.bb!.__vector_len(this.bb_pos + offset)) : null;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startUnion(builder: flatbuffers.Builder) {
-        builder.startObject(2);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param UnionMode mode
-     */
-    static addMode(builder: flatbuffers.Builder, mode: UnionMode) {
-        builder.addFieldInt16(0, mode, UnionMode.Sparse);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param flatbuffers.Offset typeIdsOffset
-     */
-    static addTypeIds(builder: flatbuffers.Builder, typeIdsOffset: flatbuffers.Offset) {
-        builder.addFieldOffset(1, typeIdsOffset, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param Array.<number> data
-     * @returns flatbuffers.Offset
-     */
-    static createTypeIdsVector(builder: flatbuffers.Builder, data: number[] | Int32Array): flatbuffers.Offset {
-        builder.startVector(4, data.length, 4);
-        for (let i = data.length - 1; i >= 0; i--) {
-            builder.addInt32(data[i]);
-        }
-        return builder.endVector();
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number numElems
-     */
-    static startTypeIdsVector(builder: flatbuffers.Builder, numElems: number) {
-        builder.startVector(4, numElems, 4);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endUnion(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createUnion(builder: flatbuffers.Builder, mode: UnionMode, typeIdsOffset: flatbuffers.Offset): flatbuffers.Offset {
-        Union.startUnion(builder);
-        Union.addMode(builder, mode);
-        Union.addTypeIds(builder, typeIdsOffset);
-        return Union.endUnion(builder);
-    }
-}
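
An illustrative sketch of the typeIds indirection described above: given one slot's entry in the type vector, recover the child column it selects (typeIdsArray() is the generated accessor shown above):

    function childIndexForTypeId(typeId: number, typeIds: Int32Array | null): number {
        if (typeIds === null) {
            // No indirection: the type vector holds child offsets directly.
            return typeId;
        }
        // typeIds[offset] is the id used in the type vector, so invert it.
        const index = typeIds.indexOf(typeId);
        if (index === -1) {
            throw new Error(`unknown union type id ${typeId}`);
        }
        return index;
    }
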
-/**
- * @constructor
- */
-export class Int {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Int
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Int {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Int= obj
-     * @returns Int
-     */
-    static getRootAsInt(bb: flatbuffers.ByteBuffer, obj?: Int): Int {
-        return (obj || new Int()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Int= obj
-     * @returns Int
-     */
-    static getSizePrefixedRootAsInt(bb: flatbuffers.ByteBuffer, obj?: Int): Int {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Int()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns number
-     */
-    bitWidth(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @returns boolean
-     */
-    isSigned(): boolean {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? !!this.bb!.readInt8(this.bb_pos + offset) : false;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startInt(builder: flatbuffers.Builder) {
-        builder.startObject(2);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number bitWidth
-     */
-    static addBitWidth(builder: flatbuffers.Builder, bitWidth: number) {
-        builder.addFieldInt32(0, bitWidth, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param boolean isSigned
-     */
-    static addIsSigned(builder: flatbuffers.Builder, isSigned: boolean) {
-        builder.addFieldInt8(1, +isSigned, +false);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endInt(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createInt(builder: flatbuffers.Builder, bitWidth: number, isSigned: boolean): flatbuffers.Offset {
-        Int.startInt(builder);
-        Int.addBitWidth(builder, bitWidth);
-        Int.addIsSigned(builder, isSigned);
-        return Int.endInt(builder);
-    }
-}
-/**
- * @constructor
- */
-export class FloatingPoint {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns FloatingPoint
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): FloatingPoint {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param FloatingPoint= obj
-     * @returns FloatingPoint
-     */
-    static getRootAsFloatingPoint(bb: flatbuffers.ByteBuffer, obj?: FloatingPoint): FloatingPoint {
-        return (obj || new FloatingPoint()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param FloatingPoint= obj
-     * @returns FloatingPoint
-     */
-    static getSizePrefixedRootAsFloatingPoint(bb: flatbuffers.ByteBuffer, obj?: FloatingPoint): FloatingPoint {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new FloatingPoint()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns Precision
-     */
-    precision(): Precision {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : Precision.HALF;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startFloatingPoint(builder: flatbuffers.Builder) {
-        builder.startObject(1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param Precision precision
-     */
-    static addPrecision(builder: flatbuffers.Builder, precision: Precision) {
-        builder.addFieldInt16(0, precision, Precision.HALF);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endFloatingPoint(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createFloatingPoint(builder: flatbuffers.Builder, precision: Precision): flatbuffers.Offset {
-        FloatingPoint.startFloatingPoint(builder);
-        FloatingPoint.addPrecision(builder, precision);
-        return FloatingPoint.endFloatingPoint(builder);
-    }
-}
-/**
- * Unicode with UTF-8 encoding
- *
- * @constructor
- */
-export class Utf8 {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Utf8
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Utf8 {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Utf8= obj
-     * @returns Utf8
-     */
-    static getRootAsUtf8(bb: flatbuffers.ByteBuffer, obj?: Utf8): Utf8 {
-        return (obj || new Utf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Utf8= obj
-     * @returns Utf8
-     */
-    static getSizePrefixedRootAsUtf8(bb: flatbuffers.ByteBuffer, obj?: Utf8): Utf8 {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Utf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startUtf8(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endUtf8(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createUtf8(builder: flatbuffers.Builder): flatbuffers.Offset {
-        Utf8.startUtf8(builder);
-        return Utf8.endUtf8(builder);
-    }
-}
-/**
- * Opaque binary data
- *
- * @constructor
- */
-export class Binary {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Binary
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Binary {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Binary= obj
-     * @returns Binary
-     */
-    static getRootAsBinary(bb: flatbuffers.ByteBuffer, obj?: Binary): Binary {
-        return (obj || new Binary()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Binary= obj
-     * @returns Binary
-     */
-    static getSizePrefixedRootAsBinary(bb: flatbuffers.ByteBuffer, obj?: Binary): Binary {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Binary()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startBinary(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
-        Binary.startBinary(builder);
-        return Binary.endBinary(builder);
-    }
-}
-/**
- * Same as Utf8, but with 64-bit offsets, allowing representation of
- * extremely large data values.
- *
- * @constructor
- */
-export class LargeUtf8 {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns LargeUtf8
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): LargeUtf8 {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param LargeUtf8= obj
-     * @returns LargeUtf8
-     */
-    static getRootAsLargeUtf8(bb: flatbuffers.ByteBuffer, obj?: LargeUtf8): LargeUtf8 {
-        return (obj || new LargeUtf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param LargeUtf8= obj
-     * @returns LargeUtf8
-     */
-    static getSizePrefixedRootAsLargeUtf8(bb: flatbuffers.ByteBuffer, obj?: LargeUtf8): LargeUtf8 {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new LargeUtf8()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startLargeUtf8(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endLargeUtf8(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createLargeUtf8(builder: flatbuffers.Builder): flatbuffers.Offset {
-        LargeUtf8.startLargeUtf8(builder);
-        return LargeUtf8.endLargeUtf8(builder);
-    }
-}
-/**
- * Same as Binary, but with 64-bit offsets, allowing representation of
- * extremely large data values.
- *
- * @constructor
- */
-export class LargeBinary {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns LargeBinary
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): LargeBinary {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param LargeBinary= obj
-     * @returns LargeBinary
-     */
-    static getRootAsLargeBinary(bb: flatbuffers.ByteBuffer, obj?: LargeBinary): LargeBinary {
-        return (obj || new LargeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param LargeBinary= obj
-     * @returns LargeBinary
-     */
-    static getSizePrefixedRootAsLargeBinary(bb: flatbuffers.ByteBuffer, obj?: LargeBinary): LargeBinary {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new LargeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startLargeBinary(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endLargeBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createLargeBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
-        LargeBinary.startLargeBinary(builder);
-        return LargeBinary.endLargeBinary(builder);
-    }
-}
-/**
- * @constructor
- */
-export class FixedSizeBinary {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns FixedSizeBinary
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): FixedSizeBinary {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param FixedSizeBinary= obj
-     * @returns FixedSizeBinary
-     */
-    static getRootAsFixedSizeBinary(bb: flatbuffers.ByteBuffer, obj?: FixedSizeBinary): FixedSizeBinary {
-        return (obj || new FixedSizeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param FixedSizeBinary= obj
-     * @returns FixedSizeBinary
-     */
-    static getSizePrefixedRootAsFixedSizeBinary(bb: flatbuffers.ByteBuffer, obj?: FixedSizeBinary): FixedSizeBinary {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new FixedSizeBinary()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * Number of bytes per value
-     *
-     * @returns number
-     */
-    byteWidth(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startFixedSizeBinary(builder: flatbuffers.Builder) {
-        builder.startObject(1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number byteWidth
-     */
-    static addByteWidth(builder: flatbuffers.Builder, byteWidth: number) {
-        builder.addFieldInt32(0, byteWidth, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endFixedSizeBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createFixedSizeBinary(builder: flatbuffers.Builder, byteWidth: number): flatbuffers.Offset {
-        FixedSizeBinary.startFixedSizeBinary(builder);
-        FixedSizeBinary.addByteWidth(builder, byteWidth);
-        return FixedSizeBinary.endFixedSizeBinary(builder);
-    }
-}
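
FixedSizeBinary shows the common one-field pattern; a hypothetical use, with a builder as above, is declaring a 16-byte-wide type such as one might pick for UUID values:

    // Assumed example: 16 bytes per value (e.g. UUIDs).
    const fsb = FixedSizeBinary.createFixedSizeBinary(builder, 16);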
-/**
- * @constructor
- */
-export class Bool {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Bool
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Bool {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Bool= obj
-     * @returns Bool
-     */
-    static getRootAsBool(bb: flatbuffers.ByteBuffer, obj?: Bool): Bool {
-        return (obj || new Bool()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Bool= obj
-     * @returns Bool
-     */
-    static getSizePrefixedRootAsBool(bb: flatbuffers.ByteBuffer, obj?: Bool): Bool {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Bool()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startBool(builder: flatbuffers.Builder) {
-        builder.startObject(0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endBool(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createBool(builder: flatbuffers.Builder): flatbuffers.Offset {
-        Bool.startBool(builder);
-        return Bool.endBool(builder);
-    }
-}
-/**
- * Exact decimal value represented as an integer value in two's
- * complement. Currently only 128-bit (16-byte) and 256-bit (32-byte) integers
- * are used. The representation uses the endianness indicated
- * in the Schema.
- *
- * @constructor
- */
-export class Decimal {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Decimal
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Decimal {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Decimal= obj
-     * @returns Decimal
-     */
-    static getRootAsDecimal(bb: flatbuffers.ByteBuffer, obj?: Decimal): Decimal {
-        return (obj || new Decimal()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Decimal= obj
-     * @returns Decimal
-     */
-    static getSizePrefixedRootAsDecimal(bb: flatbuffers.ByteBuffer, obj?: Decimal): Decimal {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Decimal()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * Total number of decimal digits
-     *
-     * @returns number
-     */
-    precision(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * Number of digits after the decimal point "."
-     *
-     * @returns number
-     */
-    scale(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;
-    }
-
-    /**
-     * Number of bits per value. The only accepted widths are 128 and 256.
-     * We use bitWidth for consistency with Int::bitWidth.
-     *
-     * @returns number
-     */
-    bitWidth(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 8);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 128;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startDecimal(builder: flatbuffers.Builder) {
-        builder.startObject(3);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number precision
-     */
-    static addPrecision(builder: flatbuffers.Builder, precision: number) {
-        builder.addFieldInt32(0, precision, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number scale
-     */
-    static addScale(builder: flatbuffers.Builder, scale: number) {
-        builder.addFieldInt32(1, scale, 0);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number bitWidth
-     */
-    static addBitWidth(builder: flatbuffers.Builder, bitWidth: number) {
-        builder.addFieldInt32(2, bitWidth, 128);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endDecimal(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createDecimal(builder: flatbuffers.Builder, precision: number, scale: number, bitWidth: number): flatbuffers.Offset {
-        Decimal.startDecimal(builder);
-        Decimal.addPrecision(builder, precision);
-        Decimal.addScale(builder, scale);
-        Decimal.addBitWidth(builder, bitWidth);
-        return Decimal.endDecimal(builder);
-    }
-}
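
To tie the three Decimal fields together, here is a hypothetical decimal(38, 10) declaration using the default 128-bit width; under this encoding the scaled integer 12345678900 with scale 10 denotes 1.2345678900:

    // precision = 38 total digits, scale = 10 fractional digits, 128-bit storage.
    const dec = Decimal.createDecimal(builder, 38, 10, 128);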
-/**
- * Date is either a 32-bit or 64-bit type representing elapsed time since UNIX
- * epoch (1970-01-01), stored in either of two units:
- *
- * * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no
- *   leap seconds), where the values are evenly divisible by 86400000
- * * Days (32 bits) since the UNIX epoch
- *
- * @constructor
- */
-export class Date {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Date
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Date {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Date= obj
-     * @returns Date
-     */
-    static getRootAsDate(bb: flatbuffers.ByteBuffer, obj?: Date): Date {
-        return (obj || new Date()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Date= obj
-     * @returns Date
-     */
-    static getSizePrefixedRootAsDate(bb: flatbuffers.ByteBuffer, obj?: Date): Date {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Date()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns DateUnit
-     */
-    unit(): DateUnit {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : DateUnit.MILLISECOND;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startDate(builder: flatbuffers.Builder) {
-        builder.startObject(1);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param DateUnit unit
-     */
-    static addUnit(builder: flatbuffers.Builder, unit: DateUnit) {
-        builder.addFieldInt16(0, unit, DateUnit.MILLISECOND);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endDate(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createDate(builder: flatbuffers.Builder, unit: DateUnit): flatbuffers.Offset {
-        Date.startDate(builder);
-        Date.addUnit(builder, unit);
-        return Date.endDate(builder);
-    }
-}
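
A hypothetical Date declaration choosing the 32-bit DAY unit (days since the UNIX epoch) rather than the MILLISECOND default:

    // DAY-unit dates are stored as 32-bit day counts since 1970-01-01.
    const date = Date.createDate(builder, DateUnit.DAY);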
-/**
- * Time type. The physical storage type depends on the unit
- * - SECOND and MILLISECOND: 32 bits
- * - MICROSECOND and NANOSECOND: 64 bits
- *
- * @constructor
- */
-export class Time {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Time
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Time {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Time= obj
-     * @returns Time
-     */
-    static getRootAsTime(bb: flatbuffers.ByteBuffer, obj?: Time): Time {
-        return (obj || new Time()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Time= obj
-     * @returns Time
-     */
-    static getSizePrefixedRootAsTime(bb: flatbuffers.ByteBuffer, obj?: Time): Time {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Time()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns TimeUnit
-     */
-    unit(): TimeUnit {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : TimeUnit.MILLISECOND;
-    }
-
-    /**
-     * @returns number
-     */
-    bitWidth(): number {
-        const offset = this.bb!.__offset(this.bb_pos, 6);
-        return offset ? this.bb!.readInt32(this.bb_pos + offset) : 32;
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     */
-    static startTime(builder: flatbuffers.Builder) {
-        builder.startObject(2);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param TimeUnit unit
-     */
-    static addUnit(builder: flatbuffers.Builder, unit: TimeUnit) {
-        builder.addFieldInt16(0, unit, TimeUnit.MILLISECOND);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @param number bitWidth
-     */
-    static addBitWidth(builder: flatbuffers.Builder, bitWidth: number) {
-        builder.addFieldInt32(1, bitWidth, 32);
-    }
-
-    /**
-     * @param flatbuffers.Builder builder
-     * @returns flatbuffers.Offset
-     */
-    static endTime(builder: flatbuffers.Builder): flatbuffers.Offset {
-        const offset = builder.endObject();
-        return offset;
-    }
-
-    static createTime(builder: flatbuffers.Builder, unit: TimeUnit, bitWidth: number): flatbuffers.Offset {
-        Time.startTime(builder);
-        Time.addUnit(builder, unit);
-        Time.addBitWidth(builder, bitWidth);
-        return Time.endTime(builder);
-    }
-}
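
Per the unit/width pairing documented above, a MICROSECOND time must use 64-bit storage; a hedged sketch with a builder as before:

    // MICROSECOND and NANOSECOND times require bitWidth 64.
    const time = Time.createTime(builder, TimeUnit.MICROSECOND, 64);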
-/**
- * Time elapsed from the Unix epoch, 00:00:00.000 on 1 January 1970, as a
- * 64-bit integer. Note that UNIX time does not include leap seconds.
- *
- * The Timestamp metadata supports both "time zone naive" and "time zone
- * aware" timestamps. Read about the timezone attribute for more detail.
- *
- * @constructor
- */
-export class Timestamp {
-    bb: flatbuffers.ByteBuffer | null = null;
-
-    bb_pos: number = 0;
-    /**
-     * @param number i
-     * @param flatbuffers.ByteBuffer bb
-     * @returns Timestamp
-     */
-    __init(i: number, bb: flatbuffers.ByteBuffer): Timestamp {
-        this.bb_pos = i;
-        this.bb = bb;
-        return this;
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Timestamp= obj
-     * @returns Timestamp
-     */
-    static getRootAsTimestamp(bb: flatbuffers.ByteBuffer, obj?: Timestamp): Timestamp {
-        return (obj || new Timestamp()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @param flatbuffers.ByteBuffer bb
-     * @param Timestamp= obj
-     * @returns Timestamp
-     */
-    static getSizePrefixedRootAsTimestamp(bb: flatbuffers.ByteBuffer, obj?: Timestamp): Timestamp {
-        bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
-        return (obj || new Timestamp()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
-    }
-
-    /**
-     * @returns TimeUnit
-     */
-    unit(): TimeUnit {
-        const offset = this.bb!.__offset(this.bb_pos, 4);
-        return offset ? /**  */ (this.bb!.readInt16(this.bb_pos + offset)) : TimeUnit.SECOND;
-    }
-
-    /**
-     * The time zone is a string indicating the name of a time zone, one of:
... 32342 lines suppressed ...

[arrow-rs] 12/14: Removed c_glib.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 32a88cfe5b8d275612106571514eb50486d85e5c
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:23:16 2021 +0000

    Removed c_glib.
---
 c_glib/.gitignore                                  |   70 -
 c_glib/Brewfile                                    |   22 -
 c_glib/Gemfile                                     |   23 -
 c_glib/README.md                                   |  315 -
 c_glib/arrow-cuda-glib/arrow-cuda-glib.h           |   24 -
 c_glib/arrow-cuda-glib/arrow-cuda-glib.hpp         |   24 -
 c_glib/arrow-cuda-glib/cuda.cpp                    |  944 ---
 c_glib/arrow-cuda-glib/cuda.h                      |  183 -
 c_glib/arrow-cuda-glib/cuda.hpp                    |   54 -
 c_glib/arrow-cuda-glib/meson.build                 |   81 -
 c_glib/arrow-dataset-glib/arrow-dataset-glib.h     |   26 -
 c_glib/arrow-dataset-glib/arrow-dataset-glib.hpp   |   26 -
 c_glib/arrow-dataset-glib/file-format.cpp          |  265 -
 c_glib/arrow-dataset-glib/file-format.h            |   92 -
 c_glib/arrow-dataset-glib/file-format.hpp          |   30 -
 c_glib/arrow-dataset-glib/fragment.cpp             |  184 -
 c_glib/arrow-dataset-glib/fragment.h               |   58 -
 c_glib/arrow-dataset-glib/fragment.hpp             |   33 -
 c_glib/arrow-dataset-glib/meson.build              |   82 -
 c_glib/arrow-dataset-glib/scanner.cpp              |  522 --
 c_glib/arrow-dataset-glib/scanner.h                |   88 -
 c_glib/arrow-dataset-glib/scanner.hpp              |   35 -
 c_glib/arrow-glib/array-builder.cpp                | 6178 --------------------
 c_glib/arrow-glib/array-builder.h                  | 1387 -----
 c_glib/arrow-glib/array-builder.hpp                |   27 -
 c_glib/arrow-glib/array.h                          |   23 -
 c_glib/arrow-glib/array.hpp                        |   23 -
 c_glib/arrow-glib/arrow-glib.h                     |   56 -
 c_glib/arrow-glib/arrow-glib.hpp                   |   51 -
 c_glib/arrow-glib/basic-array.cpp                  | 3011 ----------
 c_glib/arrow-glib/basic-array.h                    |  661 ---
 c_glib/arrow-glib/basic-array.hpp                  |   47 -
 c_glib/arrow-glib/basic-data-type.cpp              | 1984 -------
 c_glib/arrow-glib/basic-data-type.h                |  586 --
 c_glib/arrow-glib/basic-data-type.hpp              |   67 -
 c_glib/arrow-glib/buffer.cpp                       |  695 ---
 c_glib/arrow-glib/buffer.h                         |  106 -
 c_glib/arrow-glib/buffer.hpp                       |   43 -
 c_glib/arrow-glib/chunked-array.cpp                |  370 --
 c_glib/arrow-glib/chunked-array.h                  |   66 -
 c_glib/arrow-glib/chunked-array.hpp                |   27 -
 c_glib/arrow-glib/codec.cpp                        |  263 -
 c_glib/arrow-glib/codec.h                          |   74 -
 c_glib/arrow-glib/codec.hpp                        |   34 -
 c_glib/arrow-glib/composite-array.cpp              | 1706 ------
 c_glib/arrow-glib/composite-array.h                |  240 -
 c_glib/arrow-glib/composite-data-type.cpp          |  720 ---
 c_glib/arrow-glib/composite-data-type.h            |  199 -
 c_glib/arrow-glib/compute.cpp                      | 3119 ----------
 c_glib/arrow-glib/compute.h                        |  530 --
 c_glib/arrow-glib/compute.hpp                      |   70 -
 c_glib/arrow-glib/data-type.h                      |   23 -
 c_glib/arrow-glib/data-type.hpp                    |   23 -
 c_glib/arrow-glib/datum.cpp                        |  683 ---
 c_glib/arrow-glib/datum.h                          |  121 -
 c_glib/arrow-glib/datum.hpp                        |   42 -
 c_glib/arrow-glib/decimal.cpp                      | 1115 ----
 c_glib/arrow-glib/decimal.h                        |  154 -
 c_glib/arrow-glib/decimal.hpp                      |   36 -
 c_glib/arrow-glib/enums.c.template                 |   52 -
 c_glib/arrow-glib/enums.h.template                 |   41 -
 c_glib/arrow-glib/error.cpp                        |  159 -
 c_glib/arrow-glib/error.h                          |   69 -
 c_glib/arrow-glib/error.hpp                        |   73 -
 c_glib/arrow-glib/field.cpp                        |  386 --
 c_glib/arrow-glib/field.h                          |   75 -
 c_glib/arrow-glib/field.hpp                        |   28 -
 c_glib/arrow-glib/file-mode.cpp                    |   59 -
 c_glib/arrow-glib/file-mode.h                      |   40 -
 c_glib/arrow-glib/file-mode.hpp                    |   27 -
 c_glib/arrow-glib/file-system.cpp                  | 1450 -----
 c_glib/arrow-glib/file-system.h                    |  283 -
 c_glib/arrow-glib/file-system.hpp                  |   48 -
 c_glib/arrow-glib/file.cpp                         |  120 -
 c_glib/arrow-glib/file.h                           |   43 -
 c_glib/arrow-glib/file.hpp                         |   38 -
 c_glib/arrow-glib/gobject-type.h                   |  116 -
 c_glib/arrow-glib/input-stream.cpp                 | 1238 ----
 c_glib/arrow-glib/input-stream.h                   |  227 -
 c_glib/arrow-glib/input-stream.hpp                 |   50 -
 c_glib/arrow-glib/internal-hash-table.hpp          |   41 -
 c_glib/arrow-glib/internal-index.hpp               |   37 -
 c_glib/arrow-glib/ipc-options.cpp                  |  529 --
 c_glib/arrow-glib/ipc-options.h                    |   66 -
 c_glib/arrow-glib/ipc-options.hpp                  |   32 -
 c_glib/arrow-glib/local-file-system.cpp            |  211 -
 c_glib/arrow-glib/local-file-system.h              |   60 -
 c_glib/arrow-glib/local-file-system.hpp            |   32 -
 c_glib/arrow-glib/meson.build                      |  274 -
 c_glib/arrow-glib/metadata-version.cpp             |   59 -
 c_glib/arrow-glib/metadata-version.h               |   41 -
 c_glib/arrow-glib/metadata-version.hpp             |   27 -
 c_glib/arrow-glib/orc-file-reader.cpp              |  445 --
 c_glib/arrow-glib/orc-file-reader.h                |   76 -
 c_glib/arrow-glib/orc-file-reader.hpp              |   31 -
 c_glib/arrow-glib/output-stream.cpp                |  763 ---
 c_glib/arrow-glib/output-stream.h                  |  227 -
 c_glib/arrow-glib/output-stream.hpp                |   40 -
 c_glib/arrow-glib/readable.cpp                     |  114 -
 c_glib/arrow-glib/readable.h                       |   43 -
 c_glib/arrow-glib/readable.hpp                     |   39 -
 c_glib/arrow-glib/reader.cpp                       | 2187 -------
 c_glib/arrow-glib/reader.h                         |  363 --
 c_glib/arrow-glib/reader.hpp                       |   56 -
 c_glib/arrow-glib/record-batch.cpp                 |  646 --
 c_glib/arrow-glib/record-batch.h                   |  112 -
 c_glib/arrow-glib/record-batch.hpp                 |   33 -
 c_glib/arrow-glib/schema.cpp                       |  440 --
 c_glib/arrow-glib/schema.h                         |   80 -
 c_glib/arrow-glib/schema.hpp                       |   27 -
 c_glib/arrow-glib/table-builder.cpp                |  337 --
 c_glib/arrow-glib/table-builder.h                  |   69 -
 c_glib/arrow-glib/table-builder.hpp                |   27 -
 c_glib/arrow-glib/table.cpp                        |  806 ---
 c_glib/arrow-glib/table.h                          |  138 -
 c_glib/arrow-glib/table.hpp                        |   31 -
 c_glib/arrow-glib/tensor.cpp                       |  464 --
 c_glib/arrow-glib/tensor.h                         |   64 -
 c_glib/arrow-glib/tensor.hpp                       |   29 -
 c_glib/arrow-glib/type.cpp                         |  142 -
 c_glib/arrow-glib/type.h                           |  129 -
 c_glib/arrow-glib/type.hpp                         |   28 -
 c_glib/arrow-glib/version.h.in                     |  411 --
 c_glib/arrow-glib/writable-file.cpp                |   75 -
 c_glib/arrow-glib/writable-file.h                  |   39 -
 c_glib/arrow-glib/writable-file.hpp                |   39 -
 c_glib/arrow-glib/writable.cpp                     |   92 -
 c_glib/arrow-glib/writable.h                       |   40 -
 c_glib/arrow-glib/writable.hpp                     |   39 -
 c_glib/arrow-glib/writer.cpp                       |  334 --
 c_glib/arrow-glib/writer.h                         |  192 -
 c_glib/arrow-glib/writer.hpp                       |   33 -
 .../arrow-dataset-glib/arrow-dataset-glib-docs.xml |   66 -
 c_glib/doc/arrow-dataset-glib/entities.xml.in      |   24 -
 c_glib/doc/arrow-dataset-glib/meson.build          |   83 -
 c_glib/doc/arrow-glib/arrow-glib-docs.xml          |  259 -
 c_glib/doc/arrow-glib/entities.xml.in              |   24 -
 c_glib/doc/arrow-glib/meson.build                  |   93 -
 c_glib/doc/gandiva-glib/entities.xml.in            |   24 -
 c_glib/doc/gandiva-glib/gandiva-glib-docs.xml      |  128 -
 c_glib/doc/gandiva-glib/meson.build                |   83 -
 c_glib/doc/parquet-glib/entities.xml.in            |   24 -
 c_glib/doc/parquet-glib/meson.build                |   83 -
 c_glib/doc/parquet-glib/parquet-glib-docs.xml      |   93 -
 c_glib/doc/plasma-glib/entities.xml.in             |   24 -
 c_glib/doc/plasma-glib/meson.build                 |   86 -
 c_glib/doc/plasma-glib/plasma-glib-docs.xml        |   68 -
 c_glib/example/README.md                           |   48 -
 c_glib/example/build.c                             |   77 -
 c_glib/example/extension-type.c                    |  381 --
 c_glib/example/lua/README.md                       |   50 -
 c_glib/example/lua/meson.build                     |   28 -
 c_glib/example/lua/read-batch.lua                  |   44 -
 c_glib/example/lua/read-stream.lua                 |   51 -
 c_glib/example/lua/write-batch.lua                 |   74 -
 c_glib/example/lua/write-stream.lua                |   74 -
 c_glib/example/meson.build                         |   36 -
 c_glib/example/read-batch.c                        |  145 -
 c_glib/example/read-stream.c                       |  144 -
 c_glib/gandiva-glib/enums.c.template               |   52 -
 c_glib/gandiva-glib/enums.h.template               |   41 -
 c_glib/gandiva-glib/expression.cpp                 |  294 -
 c_glib/gandiva-glib/expression.h                   |   63 -
 c_glib/gandiva-glib/expression.hpp                 |   39 -
 c_glib/gandiva-glib/filter.cpp                     |  257 -
 c_glib/gandiva-glib/filter.h                       |   49 -
 c_glib/gandiva-glib/filter.hpp                     |   33 -
 c_glib/gandiva-glib/function-registry.cpp          |  116 -
 c_glib/gandiva-glib/function-registry.h            |   44 -
 c_glib/gandiva-glib/function-signature.cpp         |  243 -
 c_glib/gandiva-glib/function-signature.h           |   48 -
 c_glib/gandiva-glib/function-signature.hpp         |   27 -
 c_glib/gandiva-glib/gandiva-glib.h                 |   31 -
 c_glib/gandiva-glib/gandiva-glib.hpp               |   28 -
 c_glib/gandiva-glib/meson.build                    |  120 -
 c_glib/gandiva-glib/native-function.cpp            |  282 -
 c_glib/gandiva-glib/native-function.h              |   65 -
 c_glib/gandiva-glib/native-function.hpp            |   36 -
 c_glib/gandiva-glib/node.cpp                       | 1688 ------
 c_glib/gandiva-glib/node.h                         |  395 --
 c_glib/gandiva-glib/node.hpp                       |   49 -
 c_glib/gandiva-glib/projector.cpp                  |  391 --
 c_glib/gandiva-glib/projector.h                    |   75 -
 c_glib/gandiva-glib/projector.hpp                  |   39 -
 c_glib/gandiva-glib/selection-vector.cpp           |  323 -
 c_glib/gandiva-glib/selection-vector.h             |  128 -
 c_glib/gandiva-glib/selection-vector.hpp           |   32 -
 c_glib/gandiva-glib/version.h.in                   |  218 -
 c_glib/meson.build                                 |  181 -
 c_glib/meson_options.txt                           |   38 -
 c_glib/parquet-glib/arrow-file-reader.cpp          |  386 --
 c_glib/parquet-glib/arrow-file-reader.h            |   74 -
 c_glib/parquet-glib/arrow-file-reader.hpp          |   29 -
 c_glib/parquet-glib/arrow-file-writer.cpp          |  579 --
 c_glib/parquet-glib/arrow-file-writer.h            |  125 -
 c_glib/parquet-glib/arrow-file-writer.hpp          |   33 -
 c_glib/parquet-glib/meson.build                    |   93 -
 c_glib/parquet-glib/parquet-glib.h                 |   25 -
 c_glib/parquet-glib/parquet-glib.hpp               |   25 -
 c_glib/parquet-glib/version.h.in                   |  218 -
 c_glib/plasma-glib/client.cpp                      |  608 --
 c_glib/plasma-glib/client.h                        |   98 -
 c_glib/plasma-glib/client.hpp                      |   29 -
 c_glib/plasma-glib/meson.build                     |  107 -
 c_glib/plasma-glib/object.cpp                      |  590 --
 c_glib/plasma-glib/object.h                        |   89 -
 c_glib/plasma-glib/object.hpp                      |   47 -
 c_glib/plasma-glib/plasma-glib.h                   |   23 -
 c_glib/plasma-glib/plasma-glib.hpp                 |   25 -
 c_glib/test/dataset/test-file-format.rb            |   34 -
 c_glib/test/dataset/test-in-memory-scan-task.rb    |   59 -
 c_glib/test/dataset/test-scan-options.rb           |   47 -
 c_glib/test/file-system-tests.rb                   |  383 --
 c_glib/test/fixture/TestOrcFile.test1.orc          |  Bin 1711 -> 0 bytes
 c_glib/test/gandiva/test-binary-literal-node.rb    |   47 -
 c_glib/test/gandiva/test-boolean-literal-node.rb   |   32 -
 c_glib/test/gandiva/test-boolean-node.rb           |   38 -
 c_glib/test/gandiva/test-condition.rb              |   35 -
 c_glib/test/gandiva/test-double-literal-node.rb    |   32 -
 c_glib/test/gandiva/test-expression.rb             |   46 -
 c_glib/test/gandiva/test-field-node.rb             |   37 -
 c_glib/test/gandiva/test-filter.rb                 |   51 -
 c_glib/test/gandiva/test-float-literal-node.rb     |   32 -
 c_glib/test/gandiva/test-function-node.rb          |   43 -
 c_glib/test/gandiva/test-function-registry.rb      |   45 -
 c_glib/test/gandiva/test-function-signature.rb     |  101 -
 c_glib/test/gandiva/test-if-node.rb                |   49 -
 c_glib/test/gandiva/test-int16-literal-node.rb     |   32 -
 c_glib/test/gandiva/test-int32-literal-node.rb     |   32 -
 c_glib/test/gandiva/test-int64-literal-node.rb     |   32 -
 c_glib/test/gandiva/test-int8-literal-node.rb      |   32 -
 c_glib/test/gandiva/test-native-function.rb        |  132 -
 c_glib/test/gandiva/test-null-literal-node.rb      |   38 -
 c_glib/test/gandiva/test-projector.rb              |   63 -
 c_glib/test/gandiva/test-selectable-projector.rb   |   74 -
 c_glib/test/gandiva/test-selection-vector.rb       |   42 -
 c_glib/test/gandiva/test-string-literal-node.rb    |   32 -
 c_glib/test/gandiva/test-uint16-literal-node.rb    |   32 -
 c_glib/test/gandiva/test-uint32-literal-node.rb    |   32 -
 c_glib/test/gandiva/test-uint64-literal-node.rb    |   32 -
 c_glib/test/gandiva/test-uint8-literal-node.rb     |   32 -
 c_glib/test/helper/buildable.rb                    |  234 -
 c_glib/test/helper/data-type.rb                    |   63 -
 c_glib/test/helper/fixture.rb                      |   24 -
 c_glib/test/helper/omittable.rb                    |   36 -
 c_glib/test/helper/plasma-store.rb                 |   57 -
 c_glib/test/parquet/test-arrow-file-reader.rb      |   65 -
 c_glib/test/parquet/test-arrow-file-writer.rb      |   46 -
 c_glib/test/parquet/test-writer-properties.rb      |  103 -
 c_glib/test/plasma/test-plasma-client-options.rb   |   31 -
 c_glib/test/plasma/test-plasma-client.rb           |   94 -
 c_glib/test/plasma/test-plasma-created-object.rb   |   59 -
 c_glib/test/plasma/test-plasma-referred-object.rb  |   54 -
 c_glib/test/run-test.rb                            |   80 -
 c_glib/test/run-test.sh                            |   59 -
 c_glib/test/test-array-builder.rb                  | 1944 ------
 c_glib/test/test-array-datum.rb                    |   58 -
 c_glib/test/test-array-sort-options.rb             |   31 -
 c_glib/test/test-array.rb                          |  188 -
 c_glib/test/test-binary-array.rb                   |   61 -
 c_glib/test/test-binary-data-type.rb               |   33 -
 c_glib/test/test-boolean-array.rb                  |   81 -
 c_glib/test/test-boolean-data-type.rb              |   33 -
 c_glib/test/test-buffer-input-stream.rb            |  111 -
 c_glib/test/test-buffer-output-stream.rb           |   60 -
 c_glib/test/test-buffer.rb                         |  102 -
 c_glib/test/test-cast.rb                           |  145 -
 c_glib/test/test-chunked-array-datum.rb            |   58 -
 c_glib/test/test-chunked-array.rb                  |  141 -
 c_glib/test/test-codec.rb                          |   33 -
 c_glib/test/test-compare.rb                        |   69 -
 c_glib/test/test-compressed-input-stream.rb        |   45 -
 c_glib/test/test-compressed-output-stream.rb       |   43 -
 c_glib/test/test-count-values.rb                   |   51 -
 c_glib/test/test-count.rb                          |   46 -
 c_glib/test/test-csv-reader.rb                     |  241 -
 c_glib/test/test-cuda.rb                           |  159 -
 c_glib/test/test-date32-array.rb                   |   65 -
 c_glib/test/test-date32-data-type.rb               |   33 -
 c_glib/test/test-date64-array.rb                   |   65 -
 c_glib/test/test-date64-data-type.rb               |   33 -
 c_glib/test/test-decimal128-array.rb               |   37 -
 c_glib/test/test-decimal128-data-type.rb           |   43 -
 c_glib/test/test-decimal128.rb                     |  233 -
 c_glib/test/test-decimal256-array.rb               |   37 -
 c_glib/test/test-decimal256-data-type.rb           |   43 -
 c_glib/test/test-decimal256.rb                     |  220 -
 c_glib/test/test-dense-union-array.rb              |   88 -
 c_glib/test/test-dense-union-data-type.rb          |   64 -
 c_glib/test/test-dictionary-array-builder.rb       |  395 --
 c_glib/test/test-dictionary-array.rb               |   78 -
 c_glib/test/test-dictionary-data-type.rb           |   60 -
 c_glib/test/test-dictionary-encode.rb              |   62 -
 c_glib/test/test-double-array.rb                   |   60 -
 c_glib/test/test-double-data-type.rb               |   33 -
 c_glib/test/test-extension-data-type.rb            |  105 -
 c_glib/test/test-feather-file-reader.rb            |   71 -
 c_glib/test/test-field.rb                          |  116 -
 c_glib/test/test-file-info.rb                      |  170 -
 c_glib/test/test-file-output-stream.rb             |   38 -
 c_glib/test/test-file-selector.rb                  |   82 -
 c_glib/test/test-file-writer.rb                    |   85 -
 c_glib/test/test-filter.rb                         |  247 -
 c_glib/test/test-fixed-size-binary-array.rb        |   59 -
 c_glib/test/test-fixed-size-binary-data-type.rb    |   43 -
 c_glib/test/test-float-array.rb                    |   67 -
 c_glib/test/test-float-data-type.rb                |   33 -
 c_glib/test/test-function.rb                       |   64 -
 c_glib/test/test-gio-input-stream.rb               |   72 -
 c_glib/test/test-gio-output-stream.rb              |   79 -
 c_glib/test/test-int-array-builder.rb              |   59 -
 c_glib/test/test-int16-array.rb                    |   60 -
 c_glib/test/test-int16-data-type.rb                |   33 -
 c_glib/test/test-int32-array.rb                    |   58 -
 c_glib/test/test-int32-data-type.rb                |   33 -
 c_glib/test/test-int64-array.rb                    |   58 -
 c_glib/test/test-int64-data-type.rb                |   33 -
 c_glib/test/test-int8-array.rb                     |   65 -
 c_glib/test/test-int8-data-type.rb                 |   40 -
 c_glib/test/test-is-in.rb                          |   96 -
 c_glib/test/test-json-reader.rb                    |   90 -
 c_glib/test/test-large-binary-array.rb             |   61 -
 c_glib/test/test-large-binary-data-type.rb         |   33 -
 c_glib/test/test-large-list-array.rb               |   98 -
 c_glib/test/test-large-list-data-type.rb           |   48 -
 c_glib/test/test-large-string-array.rb             |   46 -
 c_glib/test/test-large-string-data-type.rb         |   33 -
 c_glib/test/test-list-array.rb                     |   97 -
 c_glib/test/test-list-data-type.rb                 |   48 -
 c_glib/test/test-local-file-system.rb              |   57 -
 c_glib/test/test-map-array-builder.rb              |  143 -
 c_glib/test/test-map-array.rb                      |   39 -
 c_glib/test/test-map-data-type.rb                  |   44 -
 c_glib/test/test-memory-mapped-input-stream.rb     |   84 -
 c_glib/test/test-mock-file-system.rb               |   30 -
 c_glib/test/test-mutable-buffer.rb                 |   74 -
 c_glib/test/test-null-array.rb                     |   33 -
 c_glib/test/test-null-data-type.rb                 |   33 -
 c_glib/test/test-numeric-array.rb                  |   26 -
 c_glib/test/test-orc-file-reader.rb                |  238 -
 c_glib/test/test-read-options.rb                   |   61 -
 c_glib/test/test-record-batch-builder.rb           |   86 -
 c_glib/test/test-record-batch-datum.rb             |   58 -
 c_glib/test/test-record-batch-iterator.rb          |   51 -
 c_glib/test/test-record-batch.rb                   |  185 -
 c_glib/test/test-resizable-buffer.rb               |   32 -
 c_glib/test/test-schema.rb                         |  203 -
 c_glib/test/test-slow-file-system.rb               |   43 -
 c_glib/test/test-sort-indices.rb                   |   69 -
 c_glib/test/test-sort-options.rb                   |   59 -
 c_glib/test/test-sparse-union-array.rb             |   86 -
 c_glib/test/test-sparse-union-data-type.rb         |   64 -
 c_glib/test/test-stream-writer.rb                  |   57 -
 c_glib/test/test-string-array.rb                   |   46 -
 c_glib/test/test-string-data-type.rb               |   33 -
 c_glib/test/test-struct-array.rb                   |   88 -
 c_glib/test/test-struct-data-type.rb               |  115 -
 c_glib/test/test-table-batch-reader.rb             |   42 -
 c_glib/test/test-table-datum.rb                    |   58 -
 c_glib/test/test-table.rb                          |  273 -
 c_glib/test/test-take.rb                           |  214 -
 c_glib/test/test-tensor.rb                         |  125 -
 c_glib/test/test-time-data-type.rb                 |   24 -
 c_glib/test/test-time32-array.rb                   |   69 -
 c_glib/test/test-time32-data-type.rb               |   56 -
 c_glib/test/test-time64-array.rb                   |   57 -
 c_glib/test/test-time64-data-type.rb               |   56 -
 c_glib/test/test-timestamp-array.rb                |   57 -
 c_glib/test/test-timestamp-data-type.rb            |   84 -
 c_glib/test/test-uint-array-builder.rb             |   59 -
 c_glib/test/test-uint16-array.rb                   |   60 -
 c_glib/test/test-uint16-data-type.rb               |   33 -
 c_glib/test/test-uint32-array.rb                   |   60 -
 c_glib/test/test-uint32-data-type.rb               |   33 -
 c_glib/test/test-uint64-array.rb                   |   60 -
 c_glib/test/test-uint64-data-type.rb               |   33 -
 c_glib/test/test-uint8-array.rb                    |   58 -
 c_glib/test/test-uint8-data-type.rb                |   40 -
 c_glib/test/test-unique.rb                         |   31 -
 c_glib/test/test-write-options.rb                  |  102 -
 380 files changed, 68370 deletions(-)

diff --git a/c_glib/.gitignore b/c_glib/.gitignore
deleted file mode 100644
index 0c813e8..0000000
--- a/c_glib/.gitignore
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-Makefile
-Makefile.in
-.deps/
-.libs/
-*.gir
-*.typelib
-*.o
-*.lo
-*.la
-*~
-/LICENSE.txt
-/*.tar.gz
-/aclocal.m4
-/autom4te.cache/
-/config.h
-/config.h.in
-/config.log
-/config.status
-/config/
-/configure
-/doc/*-glib/*.txt
-/doc/*-glib/*.txt.bak
-/doc/*-glib/*.args
-/doc/*-glib/*.hierarchy
-/doc/*-glib/*.interfaces
-/doc/*-glib/*.prerequisites
-/doc/*-glib/*.signals
-/doc/*-glib/*.types
-/doc/*-glib/entities.xml
-/doc/*-glib/*.stamp
-/doc/*-glib/html/
-/doc/*-glib/xml/
-/doc/*-glib/tmpl/
-/libtool
-/m4/
-/stamp-h1
-/arrow-cuda-glib/*.pc
-/*-glib/enums.c
-/*-glib/enums.h
-/*-glib/stamp-*
-/arrow-glib/version.h
-/arrow-glib/*.pc
-/gandiva-glib/version.h
-/gandiva-glib/*.pc
-/parquet-glib/version.h
-/parquet-glib/*.pc
-/plasma-glib/*.pc
-/example/build
-/example/extension-type
-/example/read-batch
-/example/read-stream
-/gtk-doc.make
-/build/
diff --git a/c_glib/Brewfile b/c_glib/Brewfile
deleted file mode 100644
index ba65853..0000000
--- a/c_glib/Brewfile
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-brew "autoconf-archive"
-brew "gobject-introspection"
-brew "gtk-doc"
-brew "libtool"
-brew "meson"
diff --git a/c_glib/Gemfile b/c_glib/Gemfile
deleted file mode 100644
index 4b57090..0000000
--- a/c_glib/Gemfile
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-source "https://rubygems.org/"
-
-gem "test-unit"
-gem "gobject-introspection"
diff --git a/c_glib/README.md b/c_glib/README.md
deleted file mode 100644
index ac17935..0000000
--- a/c_glib/README.md
+++ /dev/null
@@ -1,315 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Arrow GLib
-
-Arrow GLib is a wrapper library for [Arrow
-C++](https://github.com/apache/arrow/tree/master/cpp). Arrow GLib
-provides a C API.
-
-Arrow GLib supports [GObject
-Introspection](https://wiki.gnome.org/action/show/Projects/GObjectIntrospection).
-This means that you can create language bindings at runtime or at
-compile time.
-
-For example, you can use Apache Arrow from Ruby through Arrow GLib and
-[gobject-introspection gem](https://rubygems.org/gems/gobject-introspection)
-with the following code:
-
-```ruby
-# Generate bindings at runtime
-require "gi"
-Arrow = GI.load("Arrow")
-
-# Now, you can access arrow::BooleanArray in Arrow C++ by
-# Arrow::BooleanArray
-p Arrow::BooleanArray
-```
-
-For Ruby, you should use the
-[red-arrow gem](https://rubygems.org/gems/red-arrow). It's based on the
-gobject-introspection gem and adds many convenient features on top of
-the raw gobject-introspection based bindings.
-
-## Install
-
-You can install Arrow GLib from packages or build it yourself. Using
-packages is recommended.
-
-We use Meson and Ninja as build tools. If you run into problems while
-installing, please see [common build
-problems](https://github.com/apache/arrow/blob/master/c_glib/README.md#common-build-problems).
-
-### Packages
-
-See [install document](https://arrow.apache.org/install/) for details.
-
-### How to build by users
-
-Arrow GLib users should use a released source archive to build Arrow
-GLib (replace the version number in the following commands with the
-one you use):
-
-```console
-% wget https://downloads.apache.org/arrow/arrow-3.0.0/apache-arrow-3.0.0.tar.gz
-% tar xf apache-arrow-3.0.0.tar.gz
-% cd apache-arrow-3.0.0
-```
-
-You need to build and install Arrow C++ before you build and install
-Arrow GLib. See the Arrow C++ documentation for how to install it.
-
-If you use macOS with [Homebrew](https://brew.sh/), you must install
-the required packages first.
-
-macOS:
-
-```console
-$ brew bundle
-$ meson setup c_glib.build c_glib --buildtype=release
-$ meson compile -C c_glib.build
-$ sudo meson install -C c_glib.build
-```
-
-Others:
-
-```console
-$ meson setup c_glib.build c_glib --buildtype=release
-$ meson compile -C c_glib.build
-$ sudo meson install -C c_glib.build
-```
-
-### How to build by developers
-
-You need to install Arrow C++ before you install Arrow GLib. See the
-Arrow C++ documentation for how to install it.
-
-You need [GTK-Doc](https://www.gtk.org/gtk-doc/) and
-[GObject Introspection](https://wiki.gnome.org/Projects/GObjectIntrospection)
-to build Arrow GLib. You can install them as follows:
-
-On Debian GNU/Linux or Ubuntu:
-
-```console
-$ sudo apt install -y -V gtk-doc-tools libgirepository1.0-dev meson ninja-build
-```
-
-On CentOS 7:
-
-```console
-$ sudo yum install -y gtk-doc gobject-introspection-devel ninja-build
-$ sudo pip3 install meson
-```
-
-On CentOS 8 or later:
-
-```console
-$ sudo dnf install -y --enablerepo=powertools gtk-doc gobject-introspection-devel ninja-build
-$ sudo pip3 install meson
-```
-
-On macOS with [Homebrew](https://brew.sh/):
-
-```console
-$ brew bundle
-```
-
-You can build and install Arrow GLib as follows:
-
-macOS:
-
-```console
-$ export XML_CATALOG_FILES="$(brew --prefix)/etc/xml/catalog"
-$ meson setup c_glib.build c_glib -Dgtk_doc=true
-$ meson compile -C c_glib.build
-$ sudo meson install -C c_glib.build
-```
-
-Others:
-
-```console
-$ meson setup c_glib.build c_glib -Dgtk_doc=true
-$ meson compile -C c_glib.build
-$ sudo meson install -C c_glib.build
-```
-
-## Usage
-
-You can use Arrow GLib from C or from other languages. From C you use
-the C API; from other languages you use GObject Introspection based
-bindings.
-
-### C
-
-You can find the API reference in the
-`/usr/local/share/gtk-doc/html/arrow-glib/` directory. If you specified
-`--prefix` for `meson`, the directory will differ.
-
-You can find example code in the `example/` directory.
-
-### Language bindings
-
-You can use Arrow GLib from non-C languages via GObject Introspection
-based bindings. Here are some languages that support GObject Introspection:
-
-  * Ruby: [red-arrow gem](https://rubygems.org/gems/red-arrow) should be used.
-    * Examples: https://github.com/red-data-tools/red-arrow/tree/master/example
-
-  * Python: [PyGObject](https://wiki.gnome.org/Projects/PyGObject) should be used. (Note that you should prefer PyArrow over Arrow GLib.)
-
-  * Lua: [LGI](https://github.com/pavouk/lgi) should be used.
-    * Examples: `example/lua/` directory.
-
-  * Go: [Go-gir-generator](https://github.com/linuxdeepin/go-gir-generator) should be used. (Note that you should prefer Apache Arrow for Go over Arrow GLib.)
-
-See also
-[Projects/GObjectIntrospection/Users - GNOME Wiki!](https://wiki.gnome.org/Projects/GObjectIntrospection/Users)
-for other languages.
-
-## How to run test
-
-Arrow GLib has unit tests. You can confirm that you installed Arrow
-GLib correctly by running the unit tests.
-
-You need to install the following to run the unit tests:
-
-  * [Ruby](https://www.ruby-lang.org/)
-  * [gobject-introspection gem](https://rubygems.org/gems/gobject-introspection)
-  * [test-unit gem](https://rubygems.org/gems/test-unit)
-
-You can install them as follows:
-
-On Debian GNU/Linux or Ubuntu:
-
-```console
-$ sudo apt install -y -V ruby-dev
-$ sudo gem install bundler
-$ (cd c_glib && bundle install)
-```
-
-On CentOS 7 or later:
-
-```console
-$ sudo yum install -y git
-$ git clone https://github.com/sstephenson/rbenv.git ~/.rbenv
-$ git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build
-$ echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >> ~/.bash_profile
-$ echo 'eval "$(rbenv init -)"' >> ~/.bash_profile
-$ exec ${SHELL} --login
-$ sudo yum install -y gcc make patch openssl-devel readline-devel zlib-devel
-$ latest_ruby_version=$(rbenv install --list 2>&1 | grep '^[0-9]' | tail -n1)
-$ rbenv install ${latest_ruby_version}
-$ rbenv global ${latest_ruby_version}
-$ gem install bundler
-$ (cd c_glib && bundle install)
-```
-
-On macOS with [Homebrew](https://brew.sh/):
-
-```console
-$ (cd c_glib && bundle install)
-```
-
-Now you can run the unit tests as follows:
-
-```console
-$ cd c_glib.build
-$ bundle exec ../c_glib/test/run-test.sh
-```
-
-## Common build problems
-
-### build failed - /usr/bin/ld: cannot find -larrow
-
-Arrow C++ must be installed to build Arrow GLib. Run `make install` in
-the Arrow C++ build directory. In addition, on Linux, you may need to
-run `sudo ldconfig`.
-
-### build failed - unable to load http://docbook.sourceforge.net/release/xsl/current/html/chunk.xsl
-
-You need to set the following environment variable on macOS:
-
-```console
-$ export XML_CATALOG_FILES="$(brew --prefix)/etc/xml/catalog"
-```
-
-### build failed - Symbol not found, referenced from `libsource-highlight.4.dylib`
-
-You may get the following error on macOS:
-
-```text
-dyld: Symbol not found: __ZN5boost16re_detail_10650112perl_matcherIPKcNSt3__19allocatorINS_9sub_matchIS3_EEEENS_12regex_traitsIcNS_16cpp_regex_traitsIcEEEEE14construct_initERKNS_11basic_regexIcSC_EENS_15regex_constants12_match_flagsE
-  Referenced from: /usr/local/Cellar/source-highlight/3.1.8_7/lib/libsource-highlight.4.dylib
-  Expected in: flat namespace
- in /usr/local/Cellar/source-highlight/3.1.8_7/lib/libsource-highlight.4.dylib
-```
-
-To fix this error, you need to upgrade `source-highlight`:
-
-```console
-$ brew upgrade source-highlight
-```
-
-### test failed - Failed to load shared library '...' referenced by the typelib: dlopen(...): dependent dylib '@rpath/...' not found for '...'. relative file paths not allowed '@rpath/...'
-
-You may get the following error on macOS when running the tests:
-
-```text
-(NULL)-WARNING **: Failed to load shared library '/usr/local/lib/libparquet-glib.400.dylib' referenced by the typelib: dlopen(/usr/local/lib/libparquet-glib.400.dylib, 0x0009): dependent dylib '@rpath/libparquet.400.dylib' not found for '/usr/local/lib/libparquet-glib.400.dylib'. relative file paths not allowed '@rpath/libparquet.400.dylib'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/loader.rb:215:in `load_object_info'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/loader.rb:68:in `load_info'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/loader.rb:43:in `block in load'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/repository.rb:34:in `block (2 levels) in each'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/repository.rb:33:in `times'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/repository.rb:33:in `block in each'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/repository.rb:32:in `each'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/repository.rb:32:in `each'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection/loader.rb:42:in `load'
-        from /Library/Ruby/Gems/2.6.0/gems/gobject-introspection-3.4.3/lib/gobject-introspection.rb:44:in `load'
-        from /Users/karlkatzen/Documents/code/arrow-dev/arrow/c_glib/test/run-test.rb:60:in `<main>'
-Traceback (most recent call last):
-        17: from /Users/karlkatzen/Documents/code/arrow-dev/arrow/c_glib/test/run-test.rb:80:in `<main>'
-        16: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/autorunner.rb:66:in `run'
-        15: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/autorunner.rb:434:in `run'
-        14: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/autorunner.rb:106:in `block in <class:AutoRunner>'
-        13: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:38:in `collect'
-        12: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:136:in `add_load_path'
-        11: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:43:in `block in collect'
-        10: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:43:in `each'
-         9: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:46:in `block (2 levels) in collect'
-         8: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:85:in `collect_recursive'
-         7: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:85:in `each'
-         6: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:87:in `block in collect_recursive'
-         5: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:112:in `collect_file'
-         4: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:136:in `add_load_path'
-         3: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:114:in `block in collect_file'
-         2: from /Library/Ruby/Gems/2.6.0/gems/test-unit-3.4.0/lib/test/unit/collector/load.rb:114:in `require'
-         1: from /Users/karlkatzen/Documents/code/arrow-dev/arrow/c_glib/test/test-extension-data-type.rb:18:in `<top (required)>'
-/Users/karlkatzen/Documents/code/arrow-dev/arrow/c_glib/test/test-extension-data-type.rb:19:in `<class:TestExtensionDataType>': uninitialized constant Arrow::ExtensionArray (NameError)
-```
-
-Arrow C++ libraries that use `@rpath` in their install names can't be
-loaded here. To fix this error, you need to rebuild Arrow C++ with
-`-DARROW_INSTALL_NAME_RPATH=OFF`:
-
-```console
-$ cmake -S cpp -B cpp.build -DARROW_INSTALL_NAME_RPATH=OFF ...
-$ cmake --build cpp.build
-$ sudo cmake --build cpp.build --target install
-```
diff --git a/c_glib/arrow-cuda-glib/arrow-cuda-glib.h b/c_glib/arrow-cuda-glib/arrow-cuda-glib.h
deleted file mode 100644
index b3c7f21..0000000
--- a/c_glib/arrow-cuda-glib/arrow-cuda-glib.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.h>
-
-#include <arrow-cuda-glib/cuda.h>
diff --git a/c_glib/arrow-cuda-glib/arrow-cuda-glib.hpp b/c_glib/arrow-cuda-glib/arrow-cuda-glib.hpp
deleted file mode 100644
index e79b43a..0000000
--- a/c_glib/arrow-cuda-glib/arrow-cuda-glib.hpp
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.hpp>
-
-#include <arrow-cuda-glib/cuda.hpp>
diff --git a/c_glib/arrow-cuda-glib/cuda.cpp b/c_glib/arrow-cuda-glib/cuda.cpp
deleted file mode 100644
index 142cd6f..0000000
--- a/c_glib/arrow-cuda-glib/cuda.cpp
+++ /dev/null
@@ -1,944 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include <arrow-glib/buffer.hpp>
-#include <arrow-glib/error.hpp>
-#include <arrow-glib/input-stream.hpp>
-#include <arrow-glib/ipc-options.hpp>
-#include <arrow-glib/output-stream.hpp>
-#include <arrow-glib/readable.hpp>
-#include <arrow-glib/record-batch.hpp>
-#include <arrow-glib/schema.hpp>
-
-#include <arrow-cuda-glib/cuda.hpp>
-
-G_BEGIN_DECLS
-
-/**
- * SECTION: cuda
- * @section_id: cuda-classes
- * @title: CUDA related classes
- * @include: arrow-cuda-glib/arrow-cuda-glib.h
- *
- * The following classes provide CUDA support for Apache Arrow data.
- *
- * #GArrowCUDADeviceManager is the starting point. You need at
- * least one #GArrowCUDAContext to process Apache Arrow data on
- * an NVIDIA GPU.
- *
- * #GArrowCUDAContext is a class to keep context for one GPU. You
- * need to create #GArrowCUDAContext for each GPU that you want to
- * use. You can create #GArrowCUDAContext by
- * garrow_cuda_device_manager_get_context().
- *
- * #GArrowCUDABuffer is a class for data on GPU. You can copy data
- * on GPU to/from CPU by garrow_cuda_buffer_copy_to_host() and
- * garrow_cuda_buffer_copy_from_host(). You can share data on GPU
- * with other processes by garrow_cuda_buffer_export() and
- * garrow_cuda_buffer_new_ipc().
- *
- * #GArrowCUDAHostBuffer is a class for data on CPU that is
- * directly accessible from GPU.
- *
- * #GArrowCUDAIPCMemoryHandle is a class to share data on GPU with
- * other processes. You can export your data on GPU to other processes
- * by garrow_cuda_buffer_export() and
- * garrow_cuda_ipc_memory_handle_new(). You can import other
- * process data on GPU by garrow_cuda_ipc_memory_handle_new() and
- * garrow_cuda_buffer_new_ipc().
- *
- * #GArrowCUDABufferInputStream is a class to read data in
- * #GArrowCUDABuffer.
- *
- * #GArrowCUDABufferOutputStream is a class to write data into
- * #GArrowCUDABuffer.
- */
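-
-/* A minimal usage sketch (illustrative only; error handling is
- * abbreviated and `data` is assumed to point at 1024 bytes on the
- * CPU host). The calls follow the declarations in cuda.h:
- *
- *   GError *error = NULL;
- *   GArrowCUDADeviceManager *manager =
- *     garrow_cuda_device_manager_new(&error);
- *   GArrowCUDAContext *context =
- *     garrow_cuda_device_manager_get_context(manager, 0, &error);
- *   GArrowCUDABuffer *buffer =
- *     garrow_cuda_buffer_new(context, 1024, &error);
- *   garrow_cuda_buffer_copy_from_host(buffer, data, 1024, &error);
- *   GBytes *host_data =
- *     garrow_cuda_buffer_copy_to_host(buffer, 0, 1024, &error);
- */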
-
-G_DEFINE_TYPE(GArrowCUDADeviceManager,
-              garrow_cuda_device_manager,
-              G_TYPE_OBJECT)
-
-static void
-garrow_cuda_device_manager_init(GArrowCUDADeviceManager *object)
-{
-}
-
-static void
-garrow_cuda_device_manager_class_init(GArrowCUDADeviceManagerClass *klass)
-{
-}
-
-/**
- * garrow_cuda_device_manager_new:
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: A newly created #GArrowCUDADeviceManager on success,
- *   %NULL on error.
- *
- * Since: 0.8.0
- */
-GArrowCUDADeviceManager *
-garrow_cuda_device_manager_new(GError **error)
-{
-  auto arrow_manager = arrow::cuda::CudaDeviceManager::Instance();
-  if (garrow::check(error, arrow_manager, "[cuda][device-manager][new]")) {
-    auto manager = g_object_new(GARROW_CUDA_TYPE_DEVICE_MANAGER,
-                                NULL);
-    return GARROW_CUDA_DEVICE_MANAGER(manager);
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_device_manager_get_context:
- * @manager: A #GArrowCUDADeviceManager.
- * @gpu_number: A GPU device number for the target context.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowCUDAContext on
- *   success, %NULL on error. Contexts for the same GPU device number
- *   share the same data internally.
- *
- * Since: 0.8.0
- */
-GArrowCUDAContext *
-garrow_cuda_device_manager_get_context(GArrowCUDADeviceManager *manager,
-                                       gint gpu_number,
-                                       GError **error)
-{
-  auto arrow_manager = arrow::cuda::CudaDeviceManager::Instance();
-  auto arrow_cuda_context = (*arrow_manager)->GetContext(gpu_number);
-  if (garrow::check(error, arrow_cuda_context,
-                    "[cuda][device-manager][get-context]")) {
-    return garrow_cuda_context_new_raw(&(*arrow_cuda_context));
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_device_manager_get_n_devices:
- * @manager: A #GArrowCUDADeviceManager.
- *
- * Returns: The number of GPU devices.
- *
- * Since: 0.8.0
- */
-gsize
-garrow_cuda_device_manager_get_n_devices(GArrowCUDADeviceManager *manager)
-{
-  auto arrow_manager = arrow::cuda::CudaDeviceManager::Instance();
-  return (*arrow_manager)->num_devices();
-}
-
-
-typedef struct GArrowCUDAContextPrivate_ {
-  std::shared_ptr<arrow::cuda::CudaContext> context;
-} GArrowCUDAContextPrivate;
-
-enum {
-  PROP_CONTEXT = 1
-};
-
-G_DEFINE_TYPE_WITH_PRIVATE(GArrowCUDAContext,
-                           garrow_cuda_context,
-                           G_TYPE_OBJECT)
-
-#define GARROW_CUDA_CONTEXT_GET_PRIVATE(object) \
-  static_cast<GArrowCUDAContextPrivate *>(      \
-    garrow_cuda_context_get_instance_private(   \
-      GARROW_CUDA_CONTEXT(object)))
-
-static void
-garrow_cuda_context_finalize(GObject *object)
-{
-  auto priv = GARROW_CUDA_CONTEXT_GET_PRIVATE(object);
-
-  priv->context.~shared_ptr();
-
-  G_OBJECT_CLASS(garrow_cuda_context_parent_class)->finalize(object);
-}
-
-static void
-garrow_cuda_context_set_property(GObject *object,
-                                 guint prop_id,
-                                 const GValue *value,
-                                 GParamSpec *pspec)
-{
-  auto priv = GARROW_CUDA_CONTEXT_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_CONTEXT:
-    priv->context =
-      *static_cast<std::shared_ptr<arrow::cuda::CudaContext> *>(g_value_get_pointer(value));
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-garrow_cuda_context_get_property(GObject *object,
-                                 guint prop_id,
-                                 GValue *value,
-                                 GParamSpec *pspec)
-{
-  switch (prop_id) {
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-garrow_cuda_context_init(GArrowCUDAContext *object)
-{
-  auto priv = GARROW_CUDA_CONTEXT_GET_PRIVATE(object);
-  new(&priv->context) std::shared_ptr<arrow::cuda::CudaContext>;
-}
-
-static void
-garrow_cuda_context_class_init(GArrowCUDAContextClass *klass)
-{
-  GParamSpec *spec;
-
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->finalize     = garrow_cuda_context_finalize;
-  gobject_class->set_property = garrow_cuda_context_set_property;
-  gobject_class->get_property = garrow_cuda_context_get_property;
-
-  /**
-   * GArrowCUDAContext:context:
-   *
-   * Since: 0.8.0
-   */
-  spec = g_param_spec_pointer("context",
-                              "Context",
-                              "The raw std::shared_ptr<arrow::cuda::CudaContext>",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_CONTEXT, spec);
-}
-
-/**
- * garrow_cuda_context_get_allocated_size:
- * @context: A #GArrowCUDAContext.
- *
- * Returns: The amount of memory allocated by this context in bytes.
- *
- * Since: 0.8.0
- */
-gint64
-garrow_cuda_context_get_allocated_size(GArrowCUDAContext *context)
-{
-  auto arrow_context = garrow_cuda_context_get_raw(context);
-  return arrow_context->bytes_allocated();
-}
-
-
-G_DEFINE_TYPE(GArrowCUDABuffer,
-              garrow_cuda_buffer,
-              GARROW_TYPE_BUFFER)
-
-static void
-garrow_cuda_buffer_init(GArrowCUDABuffer *object)
-{
-}
-
-static void
-garrow_cuda_buffer_class_init(GArrowCUDABufferClass *klass)
-{
-}
-
-/**
- * garrow_cuda_buffer_new:
- * @context: A #GArrowCUDAContext.
- * @size: The number of bytes to be allocated on GPU device for this context.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowCUDABuffer on
- *   success, %NULL on error.
- *
- * Since: 0.8.0
- */
-GArrowCUDABuffer *
-garrow_cuda_buffer_new(GArrowCUDAContext *context,
-                       gint64 size,
-                       GError **error)
-{
-  auto arrow_context = garrow_cuda_context_get_raw(context);
-  auto arrow_buffer = arrow_context->Allocate(size);
-  if (garrow::check(error, arrow_buffer, "[cuda][buffer][new]")) {
-    return garrow_cuda_buffer_new_raw(&(*arrow_buffer));
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_buffer_new_ipc:
- * @context: A #GArrowCUDAContext.
- * @handle: A #GArrowCUDAIPCMemoryHandle to be communicated.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowCUDABuffer on
- *   success, %NULL on error. The buffer has data from the IPC target.
- *
- * Since: 0.8.0
- */
-GArrowCUDABuffer *
-garrow_cuda_buffer_new_ipc(GArrowCUDAContext *context,
-                           GArrowCUDAIPCMemoryHandle *handle,
-                           GError **error)
-{
-  auto arrow_context = garrow_cuda_context_get_raw(context);
-  auto arrow_handle = garrow_cuda_ipc_memory_handle_get_raw(handle);
-  auto arrow_buffer = arrow_context->OpenIpcBuffer(*arrow_handle);
-  if (garrow::check(error, arrow_buffer, "[cuda][buffer][new-ipc]")) {
-    return garrow_cuda_buffer_new_raw(&(*arrow_buffer));
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_buffer_new_record_batch:
- * @context: A #GArrowCUDAContext.
- * @record_batch: A #GArrowRecordBatch to be serialized.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowCUDABuffer on
- *   success, %NULL on error. The buffer has serialized record batch
- *   data.
- *
- * Since: 0.8.0
- */
-GArrowCUDABuffer *
-garrow_cuda_buffer_new_record_batch(GArrowCUDAContext *context,
-                                    GArrowRecordBatch *record_batch,
-                                    GError **error)
-{
-  auto arrow_context = garrow_cuda_context_get_raw(context);
-  auto arrow_record_batch = garrow_record_batch_get_raw(record_batch);
-  auto arrow_buffer = arrow::cuda::SerializeRecordBatch(*arrow_record_batch,
-                                                        arrow_context.get());
-  if (garrow::check(error, arrow_buffer, "[cuda][buffer][new-record-batch]")) {
-    return garrow_cuda_buffer_new_raw(&(*arrow_buffer));
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_buffer_copy_to_host:
- * @buffer: A #GArrowCUDABuffer.
- * @position: The offset of memory on GPU device to be copied.
- * @size: The size of memory on GPU device to be copied in bytes.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A #GBytes that contains the memory copied
- *   to the CPU host on success, %NULL on error.
- *
- * Since: 0.8.0
- */
-GBytes *
-garrow_cuda_buffer_copy_to_host(GArrowCUDABuffer *buffer,
-                                gint64 position,
-                                gint64 size,
-                                GError **error)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto data = static_cast<uint8_t *>(g_malloc(size));
-  auto status = arrow_buffer->CopyToHost(position, size, data);
-  if (garrow_error_check(error, status, "[cuda][buffer][copy-to-host]")) {
-    return g_bytes_new_take(data, size);
-  } else {
-    g_free(data);
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_buffer_copy_from_host:
- * @buffer: A #GArrowCUDABuffer.
- * @data: (array length=size): Data on CPU host to be copied.
- * @size: The size of data on CPU host to be copied in bytes.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_cuda_buffer_copy_from_host(GArrowCUDABuffer *buffer,
-                                  const guint8 *data,
-                                  gint64 size,
-                                  GError **error)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto status = arrow_buffer->CopyFromHost(0, data, size);
-  return garrow_error_check(error,
-                            status,
-                            "[cuda][buffer][copy-from-host]");
-}
-
-/**
- * garrow_cuda_buffer_export:
- * @buffer: A #GArrowCUDABuffer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created
- *   #GArrowCUDAIPCMemoryHandle to handle the exported buffer on
- *   success, %NULL on error.
- *
- * Since: 0.8.0
- */
-GArrowCUDAIPCMemoryHandle *
-garrow_cuda_buffer_export(GArrowCUDABuffer *buffer, GError **error)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto arrow_handle = arrow_buffer->ExportForIpc();
-  if (garrow::check(error, arrow_handle, "[cuda][buffer][export-for-ipc]")) {
-    return garrow_cuda_ipc_memory_handle_new_raw(&(*arrow_handle));
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_buffer_get_context:
- * @buffer: A #GArrowCUDABuffer.
- *
- * Returns: (transfer full): A newly created #GArrowCUDAContext for the
- *   buffer. Contexts for the same buffer share the same data internally.
- *
- * Since: 0.8.0
- */
-GArrowCUDAContext *
-garrow_cuda_buffer_get_context(GArrowCUDABuffer *buffer)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto arrow_context = arrow_buffer->context();
-  return garrow_cuda_context_new_raw(&arrow_context);
-}
-
-/**
- * garrow_cuda_buffer_read_record_batch:
- * @buffer: A #GArrowCUDABuffer.
- * @schema: A #GArrowSchema for record batch.
- * @options: (nullable): A #GArrowReadOptions.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowRecordBatch on
- *   success, %NULL on error. The record batch data is located on GPU.
- *
- * Since: 0.8.0
- */
-GArrowRecordBatch *
-garrow_cuda_buffer_read_record_batch(GArrowCUDABuffer *buffer,
-                                     GArrowSchema *schema,
-                                     GArrowReadOptions *options,
-                                     GError **error)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto arrow_schema = garrow_schema_get_raw(schema);
-
-  if (options) {
-    auto arrow_options = garrow_read_options_get_raw(options);
-    auto arrow_dictionary_memo =
-      garrow_read_options_get_dictionary_memo_raw(options);
-    auto arrow_record_batch =
-      arrow::cuda::ReadRecordBatch(arrow_schema,
-                                   arrow_dictionary_memo,
-                                   arrow_buffer,
-                                   arrow_options->memory_pool);
-    if (garrow::check(error, arrow_record_batch,
-                      "[cuda][buffer][read-record-batch]")) {
-      return garrow_record_batch_new_raw(&(*arrow_record_batch));
-    } else {
-      return NULL;
-    }
-  } else {
-    auto arrow_pool = arrow::default_memory_pool();
-    auto arrow_record_batch =
-      arrow::cuda::ReadRecordBatch(arrow_schema,
-                                   nullptr,
-                                   arrow_buffer,
-                                   arrow_pool);
-    if (garrow::check(error, arrow_record_batch,
-                      "[cuda][buffer][read-record-batch]")) {
-      return garrow_record_batch_new_raw(&(*arrow_record_batch));
-    } else {
-      return NULL;
-    }
-  }
-}
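-
-/* Sketch (illustrative): round-trip a record batch through GPU memory
- * with the two functions above; `context`, `record_batch`, `schema`,
- * and `error` are assumed to exist already:
- *
- *   GArrowCUDABuffer *gpu_buffer =
- *     garrow_cuda_buffer_new_record_batch(context, record_batch, &error);
- *   GArrowRecordBatch *batch =
- *     garrow_cuda_buffer_read_record_batch(gpu_buffer, schema, NULL, &error);
- */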
-
-
-G_DEFINE_TYPE(GArrowCUDAHostBuffer,
-              garrow_cuda_host_buffer,
-              GARROW_TYPE_MUTABLE_BUFFER)
-
-static void
-garrow_cuda_host_buffer_init(GArrowCUDAHostBuffer *object)
-{
-}
-
-static void
-garrow_cuda_host_buffer_class_init(GArrowCUDAHostBufferClass *klass)
-{
-}
-
-/**
- * garrow_cuda_host_buffer_new:
- * @gpu_number: A GPU device number for the target context.
- * @size: The number of bytes to be allocated on CPU host.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: A newly created #GArrowCUDAHostBuffer on success,
- *   %NULL on error. The allocated memory is accessible from the GPU
- *   device specified by @gpu_number.
- *
- * Since: 0.8.0
- */
-GArrowCUDAHostBuffer *
-garrow_cuda_host_buffer_new(gint gpu_number, gint64 size, GError **error)
-{
-  auto arrow_manager = arrow::cuda::CudaDeviceManager::Instance();
-  auto arrow_buffer = (*arrow_manager)->AllocateHost(gpu_number, size);
-  if (garrow::check(error, arrow_buffer, "[cuda][host-buffer][new]")) {
-    return garrow_cuda_host_buffer_new_raw(&(*arrow_buffer));
-  } else {
-    return NULL;
-  }
-}
-
-
-typedef struct GArrowCUDAIPCMemoryHandlePrivate_ {
-  std::shared_ptr<arrow::cuda::CudaIpcMemHandle> ipc_memory_handle;
-} GArrowCUDAIPCMemoryHandlePrivate;
-
-enum {
-  PROP_IPC_MEMORY_HANDLE = 1
-};
-
-G_DEFINE_TYPE_WITH_PRIVATE(GArrowCUDAIPCMemoryHandle,
-                           garrow_cuda_ipc_memory_handle,
-                           G_TYPE_OBJECT)
-
-#define GARROW_CUDA_IPC_MEMORY_HANDLE_GET_PRIVATE(object)       \
-  static_cast<GArrowCUDAIPCMemoryHandlePrivate *>(              \
-    garrow_cuda_ipc_memory_handle_get_instance_private(         \
-      GARROW_CUDA_IPC_MEMORY_HANDLE(object)))
-
-static void
-garrow_cuda_ipc_memory_handle_finalize(GObject *object)
-{
-  auto priv = GARROW_CUDA_IPC_MEMORY_HANDLE_GET_PRIVATE(object);
-
-  priv->ipc_memory_handle = nullptr;
-
-  G_OBJECT_CLASS(garrow_cuda_ipc_memory_handle_parent_class)->finalize(object);
-}
-
-static void
-garrow_cuda_ipc_memory_handle_set_property(GObject *object,
-                                           guint prop_id,
-                                           const GValue *value,
-                                           GParamSpec *pspec)
-{
-  auto priv = GARROW_CUDA_IPC_MEMORY_HANDLE_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_IPC_MEMORY_HANDLE:
-    priv->ipc_memory_handle =
-      *static_cast<std::shared_ptr<arrow::cuda::CudaIpcMemHandle> *>(g_value_get_pointer(value));
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-garrow_cuda_ipc_memory_handle_get_property(GObject *object,
-                                           guint prop_id,
-                                           GValue *value,
-                                           GParamSpec *pspec)
-{
-  switch (prop_id) {
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-garrow_cuda_ipc_memory_handle_init(GArrowCUDAIPCMemoryHandle *object)
-{
-}
-
-static void
-garrow_cuda_ipc_memory_handle_class_init(GArrowCUDAIPCMemoryHandleClass *klass)
-{
-  GParamSpec *spec;
-
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->finalize     = garrow_cuda_ipc_memory_handle_finalize;
-  gobject_class->set_property = garrow_cuda_ipc_memory_handle_set_property;
-  gobject_class->get_property = garrow_cuda_ipc_memory_handle_get_property;
-
-  /**
-   * GArrowCUDAIPCMemoryHandle:ipc-memory-handle:
-   *
-   * Since: 0.8.0
-   */
-  spec = g_param_spec_pointer("ipc-memory-handle",
-                              "IPC Memory Handle",
-                              "The raw std::shared_ptr<arrow::cuda::CudaIpcMemHandle>",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_IPC_MEMORY_HANDLE, spec);
-}
-
-/**
- * garrow_cuda_ipc_memory_handle_new:
- * @data: (array length=size): A serialized #GArrowCUDAIPCMemoryHandle.
- * @size: The size of @data in bytes.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowCUDAIPCMemoryHandle
- *   on success, %NULL on error.
- *
- * Since: 0.8.0
- */
-GArrowCUDAIPCMemoryHandle *
-garrow_cuda_ipc_memory_handle_new(const guint8 *data,
-                                  gsize size,
-                                  GError **error)
-{
-  auto arrow_handle = arrow::cuda::CudaIpcMemHandle::FromBuffer(data);
-  if (garrow::check(error, arrow_handle, "[cuda][ipc-memory-handle][new]")) {
-    return garrow_cuda_ipc_memory_handle_new_raw(&(*arrow_handle));
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_cuda_ipc_memory_handle_serialize:
- * @handle: A #GArrowCUDAIPCMemoryHandle.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): A newly created #GArrowBuffer on success,
- *   %NULL on error. The buffer contains the serialized @handle. The
- *   serialized @handle can be deserialized by
- *   garrow_cuda_ipc_memory_handle_new() in another process.
- *
- * Since: 0.8.0
- */
-GArrowBuffer *
-garrow_cuda_ipc_memory_handle_serialize(GArrowCUDAIPCMemoryHandle *handle,
-                                        GError **error)
-{
-  auto arrow_handle = garrow_cuda_ipc_memory_handle_get_raw(handle);
-  auto arrow_buffer = arrow_handle->Serialize(arrow::default_memory_pool());
-  if (garrow::check(error, arrow_buffer,
-                    "[cuda][ipc-memory-handle][serialize]")) {
-    return garrow_buffer_new_raw(&(*arrow_buffer));
-  } else {
-    return NULL;
-  }
-}
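-
-/* Sketch (illustrative) of the IPC round trip described above; how the
- * serialized bytes travel between the two processes is left out:
- *
- *   exporting process:
- *     GArrowCUDAIPCMemoryHandle *handle =
- *       garrow_cuda_buffer_export(buffer, &error);
- *     GArrowBuffer *serialized =
- *       garrow_cuda_ipc_memory_handle_serialize(handle, &error);
- *
- *   importing process:
- *     GArrowCUDAIPCMemoryHandle *handle =
- *       garrow_cuda_ipc_memory_handle_new(data, size, &error);
- *     GArrowCUDABuffer *buffer =
- *       garrow_cuda_buffer_new_ipc(context, handle, &error);
- */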
-
-static GArrowBuffer *
-garrow_cuda_buffer_input_stream_buffer_new_raw_readable_interface(std::shared_ptr<arrow::Buffer> *arrow_buffer)
-{
-  auto arrow_cuda_buffer =
-    reinterpret_cast<std::shared_ptr<arrow::cuda::CudaBuffer> *>(arrow_buffer);
-  auto cuda_buffer = garrow_cuda_buffer_new_raw(arrow_cuda_buffer);
-  return GARROW_BUFFER(cuda_buffer);
-}
-
-static std::shared_ptr<arrow::io::Readable>
-garrow_cuda_buffer_input_stream_get_raw_readable_interface(GArrowReadable *readable)
-{
-  auto input_stream = GARROW_INPUT_STREAM(readable);
-  auto arrow_input_stream = garrow_input_stream_get_raw(input_stream);
-  return arrow_input_stream;
-}
-
-static void
-garrow_cuda_buffer_input_stream_readable_interface_init(GArrowReadableInterface *iface)
-{
-  iface->buffer_new_raw =
-    garrow_cuda_buffer_input_stream_buffer_new_raw_readable_interface;
-  iface->get_raw =
-    garrow_cuda_buffer_input_stream_get_raw_readable_interface;
-}
-
-G_DEFINE_TYPE_WITH_CODE(
-  GArrowCUDABufferInputStream,
-  garrow_cuda_buffer_input_stream,
-  GARROW_TYPE_BUFFER_INPUT_STREAM,
-  G_IMPLEMENT_INTERFACE(
-    GARROW_TYPE_READABLE,
-    garrow_cuda_buffer_input_stream_readable_interface_init))
-
-static void
-garrow_cuda_buffer_input_stream_init(GArrowCUDABufferInputStream *object)
-{
-}
-
-static void
-garrow_cuda_buffer_input_stream_class_init(GArrowCUDABufferInputStreamClass *klass)
-{
-}
-
-/**
- * garrow_cuda_buffer_input_stream_new:
- * @buffer: A #GArrowCUDABuffer.
- *
- * Returns: (transfer full): A newly created
- *   #GArrowCUDABufferInputStream.
- *
- * Since: 0.8.0
- */
-GArrowCUDABufferInputStream *
-garrow_cuda_buffer_input_stream_new(GArrowCUDABuffer *buffer)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto arrow_reader =
-    std::make_shared<arrow::cuda::CudaBufferReader>(arrow_buffer);
-  return garrow_cuda_buffer_input_stream_new_raw(&arrow_reader);
-}
-
-
-G_DEFINE_TYPE(GArrowCUDABufferOutputStream,
-              garrow_cuda_buffer_output_stream,
-              GARROW_TYPE_OUTPUT_STREAM)
-
-static void
-garrow_cuda_buffer_output_stream_init(GArrowCUDABufferOutputStream *object)
-{
-}
-
-static void
-garrow_cuda_buffer_output_stream_class_init(GArrowCUDABufferOutputStreamClass *klass)
-{
-}
-
-/**
- * garrow_cuda_buffer_output_stream_new:
- * @buffer: A #GArrowCUDABuffer.
- *
- * Returns: (transfer full): A newly created
- *   #GArrowCUDABufferOutputStream.
- *
- * Since: 0.8.0
- */
-GArrowCUDABufferOutputStream *
-garrow_cuda_buffer_output_stream_new(GArrowCUDABuffer *buffer)
-{
-  auto arrow_buffer = garrow_cuda_buffer_get_raw(buffer);
-  auto arrow_writer =
-    std::make_shared<arrow::cuda::CudaBufferWriter>(arrow_buffer);
-  return garrow_cuda_buffer_output_stream_new_raw(&arrow_writer);
-}
-
-/**
- * garrow_cuda_buffer_output_stream_set_buffer_size:
- * @stream: A #GArrowCUDABufferOutputStream.
- * @size: The size of the CPU buffer in bytes.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Sets the CPU buffer size to limit `cudaMemcpy()` calls. If the CPU
- * buffer size is `0`, buffering is disabled.
- *
- * The default is `0`.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_cuda_buffer_output_stream_set_buffer_size(GArrowCUDABufferOutputStream *stream,
-                                                 gint64 size,
-                                                 GError **error)
-{
-  auto arrow_stream = garrow_cuda_buffer_output_stream_get_raw(stream);
-  auto status = arrow_stream->SetBufferSize(size);
-  return garrow_error_check(error,
-                            status,
-                            "[cuda][buffer-output-stream][set-buffer-size]");
-}
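-
-/* Sketch (illustrative): enable 4 KiB of CPU-side buffering so that
- * many small writes are batched into fewer `cudaMemcpy()` calls:
- *
- *   garrow_cuda_buffer_output_stream_set_buffer_size(stream, 4096, &error);
- */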
-
-/**
- * garrow_cuda_buffer_output_stream_get_buffer_size:
- * @stream: A #GArrowCUDABufferOutputStream.
- *
- * Returns: The CPU buffer size in bytes.
- *
- * See garrow_cuda_buffer_output_stream_set_buffer_size() for CPU
- * buffer size details.
- *
- * Since: 0.8.0
- */
-gint64
-garrow_cuda_buffer_output_stream_get_buffer_size(GArrowCUDABufferOutputStream *stream)
-{
-  auto arrow_stream = garrow_cuda_buffer_output_stream_get_raw(stream);
-  return arrow_stream->buffer_size();
-}
-
-/**
- * garrow_cuda_buffer_output_stream_get_buffered_size:
- * @stream: A #GArrowCUDABufferOutputStream.
- *
- * Returns: The size of buffered data in bytes.
- *
- * Since: 0.8.0
- */
-gint64
-garrow_cuda_buffer_output_stream_get_buffered_size(GArrowCUDABufferOutputStream *stream)
-{
-  auto arrow_stream = garrow_cuda_buffer_output_stream_get_raw(stream);
-  return arrow_stream->num_bytes_buffered();
-}
-
-
-G_END_DECLS
-
-GArrowCUDAContext *
-garrow_cuda_context_new_raw(std::shared_ptr<arrow::cuda::CudaContext> *arrow_context)
-{
-  return GARROW_CUDA_CONTEXT(g_object_new(GARROW_CUDA_TYPE_CONTEXT,
-                                          "context", arrow_context,
-                                          NULL));
-}
-
-std::shared_ptr<arrow::cuda::CudaContext>
-garrow_cuda_context_get_raw(GArrowCUDAContext *context)
-{
-  if (!context)
-    return nullptr;
-
-  auto priv = GARROW_CUDA_CONTEXT_GET_PRIVATE(context);
-  return priv->context;
-}
-
-GArrowCUDAIPCMemoryHandle *
-garrow_cuda_ipc_memory_handle_new_raw(std::shared_ptr<arrow::cuda::CudaIpcMemHandle> *arrow_handle)
-{
-  auto handle = g_object_new(GARROW_CUDA_TYPE_IPC_MEMORY_HANDLE,
-                             "ipc-memory-handle", arrow_handle,
-                             NULL);
-  return GARROW_CUDA_IPC_MEMORY_HANDLE(handle);
-}
-
-std::shared_ptr<arrow::cuda::CudaIpcMemHandle>
-garrow_cuda_ipc_memory_handle_get_raw(GArrowCUDAIPCMemoryHandle *handle)
-{
-  if (!handle)
-    return nullptr;
-
-  auto priv = GARROW_CUDA_IPC_MEMORY_HANDLE_GET_PRIVATE(handle);
-  return priv->ipc_memory_handle;
-}
-
-GArrowCUDABuffer *
-garrow_cuda_buffer_new_raw(std::shared_ptr<arrow::cuda::CudaBuffer> *arrow_buffer)
-{
-  return GARROW_CUDA_BUFFER(g_object_new(GARROW_CUDA_TYPE_BUFFER,
-                                         "buffer", arrow_buffer,
-                                         NULL));
-}
-
-std::shared_ptr<arrow::cuda::CudaBuffer>
-garrow_cuda_buffer_get_raw(GArrowCUDABuffer *buffer)
-{
-  if (!buffer)
-    return nullptr;
-
-  auto arrow_buffer = garrow_buffer_get_raw(GARROW_BUFFER(buffer));
-  return std::static_pointer_cast<arrow::cuda::CudaBuffer>(arrow_buffer);
-}
-
-GArrowCUDAHostBuffer *
-garrow_cuda_host_buffer_new_raw(std::shared_ptr<arrow::cuda::CudaHostBuffer> *arrow_buffer)
-{
-  auto buffer = g_object_new(GARROW_CUDA_TYPE_HOST_BUFFER,
-                             "buffer", arrow_buffer,
-                             NULL);
-  return GARROW_CUDA_HOST_BUFFER(buffer);
-}
-
-std::shared_ptr<arrow::cuda::CudaHostBuffer>
-garrow_cuda_host_buffer_get_raw(GArrowCUDAHostBuffer *buffer)
-{
-  if (!buffer)
-    return nullptr;
-
-  auto arrow_buffer = garrow_buffer_get_raw(GARROW_BUFFER(buffer));
-  return std::static_pointer_cast<arrow::cuda::CudaHostBuffer>(arrow_buffer);
-}
-
-GArrowCUDABufferInputStream *
-garrow_cuda_buffer_input_stream_new_raw(std::shared_ptr<arrow::cuda::CudaBufferReader> *arrow_reader)
-{
-  auto input_stream = g_object_new(GARROW_CUDA_TYPE_BUFFER_INPUT_STREAM,
-                                   "input-stream", arrow_reader,
-                                   NULL);
-  return GARROW_CUDA_BUFFER_INPUT_STREAM(input_stream);
-}
-
-std::shared_ptr<arrow::cuda::CudaBufferReader>
-garrow_cuda_buffer_input_stream_get_raw(GArrowCUDABufferInputStream *input_stream)
-{
-  if (!input_stream)
-    return nullptr;
-
-  auto arrow_reader =
-    garrow_input_stream_get_raw(GARROW_INPUT_STREAM(input_stream));
-  return std::static_pointer_cast<arrow::cuda::CudaBufferReader>(arrow_reader);
-}
-
-GArrowCUDABufferOutputStream *
-garrow_cuda_buffer_output_stream_new_raw(std::shared_ptr<arrow::cuda::CudaBufferWriter> *arrow_writer)
-{
-  auto output_stream = g_object_new(GARROW_CUDA_TYPE_BUFFER_OUTPUT_STREAM,
-                                    "output-stream", arrow_writer,
-                                    NULL);
-  return GARROW_CUDA_BUFFER_OUTPUT_STREAM(output_stream);
-}
-
-std::shared_ptr<arrow::cuda::CudaBufferWriter>
-garrow_cuda_buffer_output_stream_get_raw(GArrowCUDABufferOutputStream *output_stream)
-{
-  if (!output_stream)
-    return nullptr;
-
-  auto arrow_writer =
-    garrow_output_stream_get_raw(GARROW_OUTPUT_STREAM(output_stream));
-  return std::static_pointer_cast<arrow::cuda::CudaBufferWriter>(arrow_writer);
-}
diff --git a/c_glib/arrow-cuda-glib/cuda.h b/c_glib/arrow-cuda-glib/cuda.h
deleted file mode 100644
index 3c98dae..0000000
--- a/c_glib/arrow-cuda-glib/cuda.h
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.h>
-
-G_BEGIN_DECLS
-
-#define GARROW_CUDA_TYPE_DEVICE_MANAGER (garrow_cuda_device_manager_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDADeviceManager,
-                         garrow_cuda_device_manager,
-                         GARROW_CUDA,
-                         DEVICE_MANAGER,
-                         GObject)
-struct _GArrowCUDADeviceManagerClass
-{
-  GObjectClass parent_class;
-};
-
-#define GARROW_CUDA_TYPE_CONTEXT (garrow_cuda_context_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDAContext,
-                         garrow_cuda_context,
-                         GARROW_CUDA,
-                         CONTEXT,
-                         GObject)
-struct _GArrowCUDAContextClass
-{
-  GObjectClass parent_class;
-};
-
-#define GARROW_CUDA_TYPE_BUFFER (garrow_cuda_buffer_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDABuffer,
-                         garrow_cuda_buffer,
-                         GARROW_CUDA,
-                         BUFFER,
-                         GArrowBuffer)
-struct _GArrowCUDABufferClass
-{
-  GArrowBufferClass parent_class;
-};
-
-#define GARROW_CUDA_TYPE_HOST_BUFFER (garrow_cuda_host_buffer_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDAHostBuffer,
-                         garrow_cuda_host_buffer,
-                         GARROW_CUDA,
-                         HOST_BUFFER,
-                         GArrowMutableBuffer)
-struct _GArrowCUDAHostBufferClass
-{
-  GArrowMutableBufferClass parent_class;
-};
-
-#define GARROW_CUDA_TYPE_IPC_MEMORY_HANDLE      \
-  (garrow_cuda_ipc_memory_handle_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDAIPCMemoryHandle,
-                         garrow_cuda_ipc_memory_handle,
-                         GARROW_CUDA,
-                         IPC_MEMORY_HANDLE,
-                         GObject)
-struct _GArrowCUDAIPCMemoryHandleClass
-{
-  GObjectClass parent_class;
-};
-
-#define GARROW_CUDA_TYPE_BUFFER_INPUT_STREAM    \
-  (garrow_cuda_buffer_input_stream_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDABufferInputStream,
-                         garrow_cuda_buffer_input_stream,
-                         GARROW_CUDA,
-                         BUFFER_INPUT_STREAM,
-                         GArrowBufferInputStream)
-struct _GArrowCUDABufferInputStreamClass
-{
-  GArrowBufferInputStreamClass parent_class;
-};
-
-#define GARROW_CUDA_TYPE_BUFFER_OUTPUT_STREAM   \
-  (garrow_cuda_buffer_output_stream_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowCUDABufferOutputStream,
-                         garrow_cuda_buffer_output_stream,
-                         GARROW_CUDA,
-                         BUFFER_OUTPUT_STREAM,
-                         GArrowOutputStream)
-struct _GArrowCUDABufferOutputStreamClass
-{
-  GArrowOutputStreamClass parent_class;
-};
-
-GArrowCUDADeviceManager *
-garrow_cuda_device_manager_new(GError **error);
-
-GArrowCUDAContext *
-garrow_cuda_device_manager_get_context(GArrowCUDADeviceManager *manager,
-                                       gint gpu_number,
-                                       GError **error);
-gsize
-garrow_cuda_device_manager_get_n_devices(GArrowCUDADeviceManager *manager);
-
-gint64
-garrow_cuda_context_get_allocated_size(GArrowCUDAContext *context);
-
-
-GArrowCUDABuffer *
-garrow_cuda_buffer_new(GArrowCUDAContext *context,
-                       gint64 size,
-                       GError **error);
-GArrowCUDABuffer *
-garrow_cuda_buffer_new_ipc(GArrowCUDAContext *context,
-                           GArrowCUDAIPCMemoryHandle *handle,
-                           GError **error);
-GArrowCUDABuffer *
-garrow_cuda_buffer_new_record_batch(GArrowCUDAContext *context,
-                                    GArrowRecordBatch *record_batch,
-                                    GError **error);
-GBytes *
-garrow_cuda_buffer_copy_to_host(GArrowCUDABuffer *buffer,
-                                gint64 position,
-                                gint64 size,
-                                GError **error);
-gboolean
-garrow_cuda_buffer_copy_from_host(GArrowCUDABuffer *buffer,
-                                  const guint8 *data,
-                                  gint64 size,
-                                  GError **error);
-GArrowCUDAIPCMemoryHandle *
-garrow_cuda_buffer_export(GArrowCUDABuffer *buffer,
-                          GError **error);
-GArrowCUDAContext *
-garrow_cuda_buffer_get_context(GArrowCUDABuffer *buffer);
-GArrowRecordBatch *
-garrow_cuda_buffer_read_record_batch(GArrowCUDABuffer *buffer,
-                                     GArrowSchema *schema,
-                                     GArrowReadOptions *options,
-                                     GError **error);
-
-
-GArrowCUDAHostBuffer *
-garrow_cuda_host_buffer_new(gint gpu_number,
-                            gint64 size,
-                            GError **error);
-
-GArrowCUDAIPCMemoryHandle *
-garrow_cuda_ipc_memory_handle_new(const guint8 *data,
-                                  gsize size,
-                                  GError **error);
-
-GArrowBuffer *
-garrow_cuda_ipc_memory_handle_serialize(GArrowCUDAIPCMemoryHandle *handle,
-                                        GError **error);
-
-GArrowCUDABufferInputStream *
-garrow_cuda_buffer_input_stream_new(GArrowCUDABuffer *buffer);
-
-GArrowCUDABufferOutputStream *
-garrow_cuda_buffer_output_stream_new(GArrowCUDABuffer *buffer);
-
-gboolean
-garrow_cuda_buffer_output_stream_set_buffer_size(GArrowCUDABufferOutputStream *stream,
-                                                 gint64 size,
-                                                 GError **error);
-gint64
-garrow_cuda_buffer_output_stream_get_buffer_size(GArrowCUDABufferOutputStream *stream);
-gint64
-garrow_cuda_buffer_output_stream_get_buffered_size(GArrowCUDABufferOutputStream *stream);
-
-G_END_DECLS
diff --git a/c_glib/arrow-cuda-glib/cuda.hpp b/c_glib/arrow-cuda-glib/cuda.hpp
deleted file mode 100644
index 0f8985a..0000000
--- a/c_glib/arrow-cuda-glib/cuda.hpp
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow/gpu/cuda_api.h>
-
-#include <arrow-cuda-glib/cuda.h>
-
-GArrowCUDAContext *
-garrow_cuda_context_new_raw(std::shared_ptr<arrow::cuda::CudaContext> *arrow_context);
-std::shared_ptr<arrow::cuda::CudaContext>
-garrow_cuda_context_get_raw(GArrowCUDAContext *context);
-
-GArrowCUDAIPCMemoryHandle *
-garrow_cuda_ipc_memory_handle_new_raw(std::shared_ptr<arrow::cuda::CudaIpcMemHandle> *arrow_handle);
-std::shared_ptr<arrow::cuda::CudaIpcMemHandle>
-garrow_cuda_ipc_memory_handle_get_raw(GArrowCUDAIPCMemoryHandle *handle);
-
-GArrowCUDABuffer *
-garrow_cuda_buffer_new_raw(std::shared_ptr<arrow::cuda::CudaBuffer> *arrow_buffer);
-std::shared_ptr<arrow::cuda::CudaBuffer>
-garrow_cuda_buffer_get_raw(GArrowCUDABuffer *buffer);
-
-GArrowCUDAHostBuffer *
-garrow_cuda_host_buffer_new_raw(std::shared_ptr<arrow::cuda::CudaHostBuffer> *arrow_buffer);
-std::shared_ptr<arrow::cuda::CudaHostBuffer>
-garrow_cuda_host_buffer_get_raw(GArrowCUDAHostBuffer *buffer);
-
-GArrowCUDABufferInputStream *
-garrow_cuda_buffer_input_stream_new_raw(std::shared_ptr<arrow::cuda::CudaBufferReader> *arrow_reader);
-std::shared_ptr<arrow::cuda::CudaBufferReader>
-garrow_cuda_buffer_input_stream_get_raw(GArrowCUDABufferInputStream *input_stream);
-
-GArrowCUDABufferOutputStream *
-garrow_cuda_buffer_output_stream_new_raw(std::shared_ptr<arrow::cuda::CudaBufferWriter> *arrow_writer);
-std::shared_ptr<arrow::cuda::CudaBufferWriter>
-garrow_cuda_buffer_output_stream_get_raw(GArrowCUDABufferOutputStream *output_stream);
diff --git a/c_glib/arrow-cuda-glib/meson.build b/c_glib/arrow-cuda-glib/meson.build
deleted file mode 100644
index a655be0..0000000
--- a/c_glib/arrow-cuda-glib/meson.build
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- indent-tabs-mode: nil -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-sources = files(
-  'cuda.cpp',
-)
-
-c_headers = files(
-  'arrow-cuda-glib.h',
-  'cuda.h',
-)
-
-cpp_headers = files(
-  'arrow-cuda-glib.hpp',
-  'cuda.hpp',
-)
-
-headers = c_headers + cpp_headers
-install_headers(headers, subdir: 'arrow-cuda-glib')
-
-
-dependencies = [
-  arrow_cuda,
-  arrow_glib,
-]
-libarrow_cuda_glib = library('arrow-cuda-glib',
-                             sources: sources,
-                             install: true,
-                             dependencies: dependencies,
-                             include_directories: base_include_directories,
-                             soversion: so_version,
-                             version: library_version)
-arrow_cuda_glib = declare_dependency(link_with: libarrow_cuda_glib,
-                                     include_directories: base_include_directories,
-                                     dependencies: dependencies)
-
-pkgconfig.generate(libarrow_cuda_glib,
-                   filebase: 'arrow-cuda-glib',
-                   name: 'Apache Arrow CUDA GLib',
-                   description: 'C API for Apache Arrow CUDA based on GLib',
-                   version: version,
-                   requires: ['arrow-glib', 'arrow-cuda'])
-
-if have_gi
-  gir_dependencies = [
-    declare_dependency(sources: arrow_glib_gir),
-  ]
-  gir_extra_args = [
-    '--warn-all',
-    '--include-uninstalled=./arrow-glib/Arrow-1.0.gir',
-  ]
-  arrow_cuda_glib_gir = gnome.generate_gir(libarrow_cuda_glib,
-                                           dependencies: gir_dependencies,
-                                           sources: sources + c_headers,
-                                           namespace: 'ArrowCUDA',
-                                           nsversion: api_version,
-                                           identifier_prefix: 'GArrowCUDA',
-                                           symbol_prefix: 'garrow_cuda',
-                                           export_packages: 'arrow-cuda-glib',
-                                           includes: [
-                                             'Arrow-1.0',
-                                           ],
-                                           install: true,
-                                           extra_args: gir_extra_args)
-endif
diff --git a/c_glib/arrow-dataset-glib/arrow-dataset-glib.h b/c_glib/arrow-dataset-glib/arrow-dataset-glib.h
deleted file mode 100644
index ff16045..0000000
--- a/c_glib/arrow-dataset-glib/arrow-dataset-glib.h
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.h>
-
-#include <arrow-dataset-glib/file-format.h>
-#include <arrow-dataset-glib/fragment.h>
-#include <arrow-dataset-glib/scanner.h>
diff --git a/c_glib/arrow-dataset-glib/arrow-dataset-glib.hpp b/c_glib/arrow-dataset-glib/arrow-dataset-glib.hpp
deleted file mode 100644
index c221825..0000000
--- a/c_glib/arrow-dataset-glib/arrow-dataset-glib.hpp
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.hpp>
-
-#include <arrow-dataset-glib/file-format.hpp>
-#include <arrow-dataset-glib/fragment.hpp>
-#include <arrow-dataset-glib/scanner.hpp>
diff --git a/c_glib/arrow-dataset-glib/file-format.cpp b/c_glib/arrow-dataset-glib/file-format.cpp
deleted file mode 100644
index 7f10c9d..0000000
--- a/c_glib/arrow-dataset-glib/file-format.cpp
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include <arrow-glib/error.hpp>
-
-#include <arrow-dataset-glib/file-format.hpp>
-
-G_BEGIN_DECLS
-
-/**
- * SECTION: file-format
- * @section_id: file-format
- * @title: File format classes
- * @include: arrow-dataset-glib/arrow-dataset-glib.h
- *
- * #GADFileFormat is a base class for file format classes.
- *
- * #GADCSVFileFormat is a class for CSV file format.
- *
- * #GADIPCFileFormat is a class for IPC file format.
- *
- * #GADParquetFileFormat is a class for Parquet file format.
- *
- * Since: 3.0.0
- */
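-
-/* A brief usage sketch (illustrative, not part of the original API
- * docs):
- *
- *   GADCSVFileFormat *csv = gad_csv_file_format_new();
- *   gchar *name = gad_file_format_get_type_name(GAD_FILE_FORMAT(csv));
- *   // name is "csv"; free it with g_free() when done
- *   g_free(name);
- */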
-
-typedef struct GADFileFormatPrivate_ {
-  std::shared_ptr<arrow::dataset::FileFormat> file_format;
-} GADFileFormatPrivate;
-
-enum {
-  PROP_FILE_FORMAT = 1,
-};
-
-G_DEFINE_TYPE_WITH_PRIVATE(GADFileFormat,
-                           gad_file_format,
-                           G_TYPE_OBJECT)
-
-#define GAD_FILE_FORMAT_GET_PRIVATE(obj)        \
-  static_cast<GADFileFormatPrivate *>(          \
-    gad_file_format_get_instance_private(       \
-      GAD_FILE_FORMAT(obj)))
-
-static void
-gad_file_format_finalize(GObject *object)
-{
-  auto priv = GAD_FILE_FORMAT_GET_PRIVATE(object);
-
-  priv->file_format.~shared_ptr();
-
-  G_OBJECT_CLASS(gad_file_format_parent_class)->finalize(object);
-}
-
-static void
-gad_file_format_set_property(GObject *object,
-                             guint prop_id,
-                             const GValue *value,
-                             GParamSpec *pspec)
-{
-  auto priv = GAD_FILE_FORMAT_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_FILE_FORMAT:
-    priv->file_format =
-      *static_cast<std::shared_ptr<arrow::dataset::FileFormat> *>(g_value_get_pointer(value));
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-gad_file_format_init(GADFileFormat *object)
-{
-  auto priv = GAD_FILE_FORMAT_GET_PRIVATE(object);
-  new(&priv->file_format) std::shared_ptr<arrow::dataset::FileFormat>;
-}
-
-static void
-gad_file_format_class_init(GADFileFormatClass *klass)
-{
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->finalize     = gad_file_format_finalize;
-  gobject_class->set_property = gad_file_format_set_property;
-
-  GParamSpec *spec;
-  spec = g_param_spec_pointer("file-format",
-                              "FileFormat",
-                              "The raw std::shared_ptr<arrow::dataset::FileFormat> *",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_FILE_FORMAT, spec);
-}
-
-/**
- * gad_file_format_get_type_name:
- * @file_format: A #GADFileFormat.
- *
- * Returns: The type name of @file_format.
- *
- *   It should be freed with g_free() when no longer needed.
- *
- * Since: 3.0.0
- */
-gchar *
-gad_file_format_get_type_name(GADFileFormat *file_format)
-{
-  const auto arrow_file_format = gad_file_format_get_raw(file_format);
-  const auto &type_name = arrow_file_format->type_name();
-  return g_strndup(type_name.data(), type_name.size());
-}
-
-/**
- * gad_file_format_equal:
- * @file_format: A #GADFileFormat.
- * @other_file_format: A #GADFileFormat to be compared.
- *
- * Returns: %TRUE if they represent the same file format, %FALSE otherwise.
- *
- * Since: 3.0.0
- */
-gboolean
-gad_file_format_equal(GADFileFormat *file_format,
-                      GADFileFormat *other_file_format)
-{
-  const auto arrow_file_format = gad_file_format_get_raw(file_format);
-  const auto arrow_other_file_format = gad_file_format_get_raw(other_file_format);
-  return arrow_file_format->Equals(*arrow_other_file_format);
-}
-
-
-G_DEFINE_TYPE(GADCSVFileFormat,
-              gad_csv_file_format,
-              GAD_TYPE_FILE_FORMAT)
-
-static void
-gad_csv_file_format_init(GADCSVFileFormat *object)
-{
-}
-
-static void
-gad_csv_file_format_class_init(GADCSVFileFormatClass *klass)
-{
-}
-
-/**
- * gad_csv_file_format_new:
- *
- * Returns: The newly created CSV file format.
- *
- * Since: 3.0.0
- */
-GADCSVFileFormat *
-gad_csv_file_format_new(void)
-{
-  std::shared_ptr<arrow::dataset::FileFormat> arrow_file_format =
-    std::make_shared<arrow::dataset::CsvFileFormat>();
-  return GAD_CSV_FILE_FORMAT(gad_file_format_new_raw(&arrow_file_format));
-}
-
-
-G_DEFINE_TYPE(GADIPCFileFormat,
-              gad_ipc_file_format,
-              GAD_TYPE_FILE_FORMAT)
-
-static void
-gad_ipc_file_format_init(GADIPCFileFormat *object)
-{
-}
-
-static void
-gad_ipc_file_format_class_init(GADIPCFileFormatClass *klass)
-{
-}
-
-/**
- * gad_ipc_file_format_new:
- *
- * Returns: The newly created IPC file format.
- *
- * Since: 3.0.0
- */
-GADIPCFileFormat *
-gad_ipc_file_format_new(void)
-{
-  std::shared_ptr<arrow::dataset::FileFormat> arrow_file_format =
-    std::make_shared<arrow::dataset::IpcFileFormat>();
-  return GAD_IPC_FILE_FORMAT(gad_file_format_new_raw(&arrow_file_format));
-}
-
-
-G_DEFINE_TYPE(GADParquetFileFormat,
-              gad_parquet_file_format,
-              GAD_TYPE_FILE_FORMAT)
-
-static void
-gad_parquet_file_format_init(GADParquetFileFormat *object)
-{
-}
-
-static void
-gad_parquet_file_format_class_init(GADParquetFileFormatClass *klass)
-{
-}
-
-/**
- * gad_parquet_file_format_new:
- *
- * Returns: The newly created Parquet file format.
- *
- * Since: 3.0.0
- */
-GADParquetFileFormat *
-gad_parquet_file_format_new(void)
-{
-  std::shared_ptr<arrow::dataset::FileFormat> arrow_file_format =
-    std::make_shared<arrow::dataset::ParquetFileFormat>();
-  return GAD_PARQUET_FILE_FORMAT(gad_file_format_new_raw(&arrow_file_format));
-}
-
-
-G_END_DECLS
-
-GADFileFormat *
-gad_file_format_new_raw(
-  std::shared_ptr<arrow::dataset::FileFormat> *arrow_file_format)
-{
-  GType type = GAD_TYPE_FILE_FORMAT;
-  const auto &type_name = (*arrow_file_format)->type_name();
-  if (type_name == "csv") {
-    type = GAD_TYPE_CSV_FILE_FORMAT;
-  } else if (type_name == "ipc") {
-    type = GAD_TYPE_IPC_FILE_FORMAT;
-  } else if (type_name == "parquet") {
-    type = GAD_TYPE_PARQUET_FILE_FORMAT;
-  }
-  return GAD_FILE_FORMAT(g_object_new(type,
-                                      "file-format", arrow_file_format,
-                                      NULL));
-}
-
-std::shared_ptr<arrow::dataset::FileFormat>
-gad_file_format_get_raw(GADFileFormat *file_format)
-{
-  auto priv = GAD_FILE_FORMAT_GET_PRIVATE(file_format);
-  return priv->file_format;
-}
diff --git a/c_glib/arrow-dataset-glib/file-format.h b/c_glib/arrow-dataset-glib/file-format.h
deleted file mode 100644
index f77addc..0000000
--- a/c_glib/arrow-dataset-glib/file-format.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.h>
-
-G_BEGIN_DECLS
-
-#define GAD_TYPE_FILE_FORMAT (gad_file_format_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADFileFormat,
-                         gad_file_format,
-                         GAD,
-                         FILE_FORMAT,
-                         GObject)
-struct _GADFileFormatClass
-{
-  GObjectClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_3_0
-gchar *
-gad_file_format_get_type_name(GADFileFormat *file_format);
-
-GARROW_AVAILABLE_IN_3_0
-gboolean
-gad_file_format_equal(GADFileFormat *file_format,
-                      GADFileFormat *other_file_format);
-
-
-#define GAD_TYPE_CSV_FILE_FORMAT (gad_csv_file_format_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADCSVFileFormat,
-                         gad_csv_file_format,
-                         GAD,
-                         CSV_FILE_FORMAT,
-                         GADFileFormat)
-struct _GADCSVFileFormatClass
-{
-  GADFileFormatClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_3_0
-GADCSVFileFormat *gad_csv_file_format_new(void);
-
-
-#define GAD_TYPE_IPC_FILE_FORMAT (gad_ipc_file_format_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADIPCFileFormat,
-                         gad_ipc_file_format,
-                         GAD,
-                         IPC_FILE_FORMAT,
-                         GADFileFormat)
-struct _GADIPCFileFormatClass
-{
-  GADFileFormatClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_3_0
-GADIPCFileFormat *gad_ipc_file_format_new(void);
-
-
-#define GAD_TYPE_PARQUET_FILE_FORMAT (gad_parquet_file_format_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADParquetFileFormat,
-                         gad_parquet_file_format,
-                         GAD,
-                         PARQUET_FILE_FORMAT,
-                         GADFileFormat)
-struct _GADParquetFileFormatClass
-{
-  GADFileFormatClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_3_0
-GADParquetFileFormat *gad_parquet_file_format_new(void);
-
-
-G_END_DECLS
diff --git a/c_glib/arrow-dataset-glib/file-format.hpp b/c_glib/arrow-dataset-glib/file-format.hpp
deleted file mode 100644
index e7e73f4..0000000
--- a/c_glib/arrow-dataset-glib/file-format.hpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow/dataset/api.h>
-
-#include <arrow-dataset-glib/file-format.h>
-
-GADFileFormat *
-gad_file_format_new_raw(
-  std::shared_ptr<arrow::dataset::FileFormat> *arrow_file_format);
-std::shared_ptr<arrow::dataset::FileFormat>
-gad_file_format_get_raw(GADFileFormat *file_format);
diff --git a/c_glib/arrow-dataset-glib/fragment.cpp b/c_glib/arrow-dataset-glib/fragment.cpp
deleted file mode 100644
index 515a370..0000000
--- a/c_glib/arrow-dataset-glib/fragment.cpp
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include <arrow-glib/record-batch.hpp>
-#include <arrow-glib/schema.hpp>
-
-#include <arrow-dataset-glib/fragment.hpp>
-
-G_BEGIN_DECLS
-
-/**
- * SECTION: fragment
- * @section_id: fragment
- * @title: Fragment classes
- * @include: arrow-dataset-glib/arrow-dataset-glib.h
- *
- * #GADFragment is a base class for all fragment classes.
- *
- * #GADInMemoryFragment is a class for an in-memory fragment.
- *
- * Since: 4.0.0
- */
-
-/* arrow::dataset::Fragment */
-
-typedef struct GADFragmentPrivate_ {
-  std::shared_ptr<arrow::dataset::Fragment> fragment;
-} GADFragmentPrivate;
-
-enum {
-  PROP_FRAGMENT = 1,
-};
-
-G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE(GADFragment,
-                                    gad_fragment,
-                                    G_TYPE_OBJECT)
-
-#define GAD_FRAGMENT_GET_PRIVATE(obj)           \
-  static_cast<GADFragmentPrivate *>(            \
-    gad_fragment_get_instance_private(          \
-      GAD_FRAGMENT(obj)))
-
-static void
-gad_fragment_finalize(GObject *object)
-{
-  auto priv = GAD_FRAGMENT_GET_PRIVATE(object);
-
-  priv->fragment.~shared_ptr();
-
-  G_OBJECT_CLASS(gad_fragment_parent_class)->finalize(object);
-}
-
-static void
-gad_fragment_set_property(GObject *object,
-                          guint prop_id,
-                          const GValue *value,
-                          GParamSpec *pspec)
-{
-  auto priv = GAD_FRAGMENT_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_FRAGMENT:
-    priv->fragment =
-      *static_cast<std::shared_ptr<arrow::dataset::Fragment> *>(g_value_get_pointer(value));
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-gad_fragment_init(GADFragment *object)
-{
-  auto priv = GAD_FRAGMENT_GET_PRIVATE(object);
-  new(&priv->fragment) std::shared_ptr<arrow::dataset::Fragment>;
-}
-
-static void
-gad_fragment_class_init(GADFragmentClass *klass)
-{
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->finalize     = gad_fragment_finalize;
-  gobject_class->set_property = gad_fragment_set_property;
-
-  GParamSpec *spec;
-  spec = g_param_spec_pointer("fragment",
-                              "Fragment",
-                              "The raw std::shared<arrow::dataset::Fragment> *",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_FRAGMENT, spec);
-}
-
-/* arrow::dataset::InMemoryFragment */
-
-G_DEFINE_TYPE(GADInMemoryFragment,
-              gad_in_memory_fragment,
-              GAD_TYPE_FRAGMENT)
-
-static void
-gad_in_memory_fragment_init(GADInMemoryFragment *object)
-{
-}
-
-static void
-gad_in_memory_fragment_class_init(GADInMemoryFragmentClass *klass)
-{
-}
-
-/**
- * gad_in_memory_fragment_new:
- * @schema: A #GArrowSchema.
- * @record_batches: (array length=n_record_batches):
- *   (element-type GArrowRecordBatch): The record batches of the table.
- * @n_record_batches: The number of record batches.
- *
- * Returns: A newly created #GADInMemoryFragment.
- *
- * Since: 4.0.0
- */
-GADInMemoryFragment *
-gad_in_memory_fragment_new(GArrowSchema *schema,
-                           GArrowRecordBatch **record_batches,
-                           gsize n_record_batches)
-{
-  auto arrow_schema = garrow_schema_get_raw(schema);
-  std::vector<std::shared_ptr<arrow::RecordBatch>> arrow_record_batches;
-  arrow_record_batches.reserve(n_record_batches);
-  for (gsize i = 0; i < n_record_batches; ++i) {
-    auto arrow_record_batch = garrow_record_batch_get_raw(record_batches[i]);
-    arrow_record_batches.push_back(arrow_record_batch);
-  }
-  auto arrow_in_memory_fragment =
-    std::make_shared<arrow::dataset::InMemoryFragment>(arrow_schema,
-                                                       arrow_record_batches);
-  return gad_in_memory_fragment_new_raw(&arrow_in_memory_fragment);
-}
-
-G_END_DECLS
-
-GADFragment *
-gad_fragment_new_raw(std::shared_ptr<arrow::dataset::Fragment> *arrow_fragment)
-{
-  auto fragment =
-    GAD_FRAGMENT(g_object_new(GAD_TYPE_FRAGMENT,
-                              "fragment", arrow_fragment,
-                              NULL));
-  return fragment;
-}
-
-std::shared_ptr<arrow::dataset::Fragment>
-gad_fragment_get_raw(GADFragment *fragment)
-{
-  auto priv = GAD_FRAGMENT_GET_PRIVATE(fragment);
-  return priv->fragment;
-}
-
-GADInMemoryFragment *
-gad_in_memory_fragment_new_raw(std::shared_ptr<arrow::dataset::InMemoryFragment> *arrow_fragment)
-{
-  auto fragment =
-    GAD_IN_MEMORY_FRAGMENT(g_object_new(GAD_TYPE_IN_MEMORY_FRAGMENT,
-                                        "fragment", arrow_fragment,
-                                        NULL));
-  return fragment;
-}
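The in-memory fragment constructor removed above takes a schema plus a C
array of record batches. A rough usage sketch; build_example_batch() is a
hypothetical helper, not part of the removed API:

    #include <arrow-dataset-glib/arrow-dataset-glib.h>

    /* Hypothetical helper: returns a GArrowRecordBatch conforming to
       `schema`; building one requires per-column array builders and is
       omitted here. */
    extern GArrowRecordBatch *build_example_batch(GArrowSchema *schema);

    static GADInMemoryFragment *
    make_fragment(GArrowSchema *schema)
    {
      GArrowRecordBatch *batches[1];
      batches[0] = build_example_batch(schema);

      /* gad_in_memory_fragment_new() copies the underlying shared_ptr of
         each batch, so the caller keeps and releases its own references. */
      GADInMemoryFragment *fragment =
        gad_in_memory_fragment_new(schema, batches, 1);

      g_object_unref(batches[0]);
      return fragment;
    }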
diff --git a/c_glib/arrow-dataset-glib/fragment.h b/c_glib/arrow-dataset-glib/fragment.h
deleted file mode 100644
index c0ee876..0000000
--- a/c_glib/arrow-dataset-glib/fragment.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.h>
-
-G_BEGIN_DECLS
-
-/* arrow::dataset::Fragment */
-
-#define GAD_TYPE_FRAGMENT (gad_fragment_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADFragment,
-                         gad_fragment,
-                         GAD,
-                         FRAGMENT,
-                         GObject)
-struct _GADFragmentClass
-{
-  GObjectClass parent_class;
-};
-
-/* arrow::dataset::InMemoryFragment */
-
-#define GAD_TYPE_IN_MEMORY_FRAGMENT (gad_in_memory_fragment_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADInMemoryFragment,
-                         gad_in_memory_fragment,
-                         GAD,
-                         IN_MEMORY_FRAGMENT,
-                         GADFragment)
-struct _GADInMemoryFragmentClass
-{
-  GADFragmentClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_4_0
-GADInMemoryFragment *
-gad_in_memory_fragment_new(GArrowSchema *schema,
-                           GArrowRecordBatch **record_batches,
-                           gsize n_record_batches);
-
-G_END_DECLS
diff --git a/c_glib/arrow-dataset-glib/fragment.hpp b/c_glib/arrow-dataset-glib/fragment.hpp
deleted file mode 100644
index 441b7c9..0000000
--- a/c_glib/arrow-dataset-glib/fragment.hpp
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow/dataset/api.h>
-
-#include <arrow-dataset-glib/fragment.h>
-
-std::shared_ptr<arrow::dataset::Fragment>
-gad_fragment_get_raw(GADFragment *fragment);
-
-GADFragment*
-gad_fragment_new_raw(std::shared_ptr<arrow::dataset::Fragment> *arrow_fragment);
-
-GADInMemoryFragment*
-gad_in_memory_fragment_new_raw(std::shared_ptr<arrow::dataset::InMemoryFragment> *arrow_fragment);
diff --git a/c_glib/arrow-dataset-glib/meson.build b/c_glib/arrow-dataset-glib/meson.build
deleted file mode 100644
index 83b5750..0000000
--- a/c_glib/arrow-dataset-glib/meson.build
+++ /dev/null
@@ -1,82 +0,0 @@
-# -*- indent-tabs-mode: nil -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-sources = files(
-  'file-format.cpp',
-  'fragment.cpp',
-  'scanner.cpp',
-)
-
-c_headers = files(
-  'arrow-dataset-glib.h',
-  'file-format.h',
-  'fragment.h',
-  'scanner.h',
-)
-
-cpp_headers = files(
-  'arrow-dataset-glib.hpp',
-  'file-format.hpp',
-  'fragment.hpp',
-  'scanner.hpp',
-)
-
-headers = c_headers + cpp_headers
-install_headers(headers, subdir: 'arrow-dataset-glib')
-
-dependencies = [
-  arrow_dataset,
-  arrow_glib,
-]
-libarrow_dataset_glib = library('arrow-dataset-glib',
-                                sources: sources,
-                                install: true,
-                                dependencies: dependencies,
-                                include_directories: base_include_directories,
-                                soversion: so_version,
-                                version: library_version)
-arrow_dataset_glib = declare_dependency(link_with: libarrow_dataset_glib,
-                                        include_directories: base_include_directories,
-                                        dependencies: dependencies)
-
-pkgconfig.generate(libarrow_dataset_glib,
-                   filebase: 'arrow-dataset-glib',
-                   name: 'Apache Arrow Dataset GLib',
-                   description: 'C API for Apache Arrow Dataset based on GLib',
-                   version: version,
-                   requires: ['arrow-glib', 'arrow-dataset'])
-
-if have_gi
-  gnome.generate_gir(libarrow_dataset_glib,
-                     dependencies: declare_dependency(sources: arrow_glib_gir),
-                     sources: sources + c_headers,
-                     namespace: 'ArrowDataset',
-                     nsversion: api_version,
-                     identifier_prefix: 'GAD',
-                     symbol_prefix: 'gad',
-                     export_packages: 'arrow-dataset-glib',
-                     includes: [
-                       'Arrow-1.0',
-                     ],
-                     install: true,
-                     extra_args: [
-                       '--warn-all',
-                       '--include-uninstalled=./arrow-glib/Arrow-1.0.gir',
-                     ])
-endif
diff --git a/c_glib/arrow-dataset-glib/scanner.cpp b/c_glib/arrow-dataset-glib/scanner.cpp
deleted file mode 100644
index 36701ca..0000000
--- a/c_glib/arrow-dataset-glib/scanner.cpp
+++ /dev/null
@@ -1,522 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include <arrow/util/iterator.h>
-
-#include <arrow-glib/error.hpp>
-#include <arrow-glib/record-batch.hpp>
-#include <arrow-glib/schema.hpp>
-
-#include <arrow-dataset-glib/fragment.hpp>
-#include <arrow-dataset-glib/scanner.hpp>
-
-G_BEGIN_DECLS
-
-/**
- * SECTION: scanner
- * @section_id: scanner
- * @title: Scanner classes
- * @include: arrow-dataset-glib/arrow-dataset-glib.h
- *
- * #GADScanOptions is a class for a set of scan options.
- *
- * #GADScanTask is an abstract class for a scan task.
- *
- * #GADInMemoryScanTask is a class for a scan task of record batches.
- *
- * Since: 1.0.0
- */
-
-/* arrow::dataset::ScanOptions */
-
-typedef struct GADScanOptionsPrivate_ {
-  std::shared_ptr<arrow::dataset::ScanOptions> scan_options;
-} GADScanOptionsPrivate;
-
-enum {
-  PROP_SCAN_OPTIONS = 1,
-  PROP_FILTER,
-  PROP_EVALUATOR,
-  PROP_PROJECTOR,
-  PROP_BATCH_SIZE,
-  PROP_USE_THREADS,
-};
-
-G_DEFINE_TYPE_WITH_PRIVATE(GADScanOptions,
-                           gad_scan_options,
-                           G_TYPE_OBJECT)
-
-#define GAD_SCAN_OPTIONS_GET_PRIVATE(obj)       \
-  static_cast<GADScanOptionsPrivate *>(         \
-    gad_scan_options_get_instance_private(      \
-      GAD_SCAN_OPTIONS(obj)))
-
-static void
-gad_scan_options_finalize(GObject *object)
-{
-  auto priv = GAD_SCAN_OPTIONS_GET_PRIVATE(object);
-
-  priv->scan_options.~shared_ptr();
-
-  G_OBJECT_CLASS(gad_scan_options_parent_class)->finalize(object);
-}
-
-static void
-gad_scan_options_set_property(GObject *object,
-                              guint prop_id,
-                              const GValue *value,
-                              GParamSpec *pspec)
-{
-  auto priv = GAD_SCAN_OPTIONS_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_SCAN_OPTIONS:
-    priv->scan_options =
-      *static_cast<std::shared_ptr<arrow::dataset::ScanOptions> *>(g_value_get_pointer(value));
-    break;
-  case PROP_BATCH_SIZE:
-    priv->scan_options->batch_size = g_value_get_int64(value);
-    break;
-  case PROP_USE_THREADS:
-    priv->scan_options->use_threads = g_value_get_boolean(value);
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-gad_scan_options_get_property(GObject *object,
-                              guint prop_id,
-                              GValue *value,
-                              GParamSpec *pspec)
-{
-  auto priv = GAD_SCAN_OPTIONS_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_BATCH_SIZE:
-    g_value_set_int64(value, priv->scan_options->batch_size);
-    break;
-  case PROP_USE_THREADS:
-    g_value_set_boolean(value, priv->scan_options->use_threads);
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-gad_scan_options_init(GADScanOptions *object)
-{
-  auto priv = GAD_SCAN_OPTIONS_GET_PRIVATE(object);
-  new(&priv->scan_options) std::shared_ptr<arrow::dataset::ScanOptions>;
-}
-
-static void
-gad_scan_options_class_init(GADScanOptionsClass *klass)
-{
-  GObjectClass *gobject_class;
-  GParamSpec *spec;
-
-  gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->finalize     = gad_scan_options_finalize;
-  gobject_class->set_property = gad_scan_options_set_property;
-  gobject_class->get_property = gad_scan_options_get_property;
-
-  auto scan_options = std::make_shared<arrow::dataset::ScanOptions>();
-
-  spec = g_param_spec_pointer("scan-options",
-                              "ScanOptions",
-                              "The raw std::shared<arrow::dataset::ScanOptions> *",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_SCAN_OPTIONS, spec);
-
-  // TODO: PROP_FILTER
-  // TODO: PROP_EVALUATOR
-  // TODO: PROP_PROJECTOR
-
-  /**
-   * GADScanOptions:batch-size:
-   *
-   * Maximum row count for scanned batches.
-   *
-   * Since: 1.0.0
-   */
-  spec = g_param_spec_int64("batch-size",
-                            "Batch size",
-                            "Maximum row count for scanned batches",
-                            0,
-                            G_MAXINT64,
-                            scan_options->batch_size,
-                            static_cast<GParamFlags>(G_PARAM_READWRITE));
-  g_object_class_install_property(gobject_class, PROP_BATCH_SIZE, spec);
-
-  /**
-   * GADScanOptions:use-threads:
-   *
-   * Indicate if the Scanner should make use of a ThreadPool.
-   *
-   * Since: 4.0.0
-   */
-  spec = g_param_spec_boolean("use-threads",
-                              "Use threads",
-                              "Indicate if the Scanner should make use of a ThreadPool",
-                              scan_options->use_threads,
-                              static_cast<GParamFlags>(G_PARAM_READWRITE));
-  g_object_class_install_property(gobject_class, PROP_USE_THREADS, spec);
-}
-
-/**
- * gad_scan_options_new:
- * @schema: A #GArrowSchema.
- *
- * Returns: A newly created #GADScanOptions.
- *
- * Since: 1.0.0
- */
-GADScanOptions *
-gad_scan_options_new(GArrowSchema *schema)
-{
-  auto arrow_schema = garrow_schema_get_raw(schema);
-  auto arrow_scan_options = std::make_shared<arrow::dataset::ScanOptions>();
-  arrow_scan_options->dataset_schema = arrow_schema;
-  return gad_scan_options_new_raw(&arrow_scan_options);
-}
-
-/**
- * gad_scan_options_get_schema:
- * @scan_options: A #GADScanOptions.
- *
- * Returns: (transfer full): A #GArrowSchema.
- *
- * Since: 1.0.0
- */
-GArrowSchema *
-gad_scan_options_get_schema(GADScanOptions *scan_options)
-{
-  auto priv = GAD_SCAN_OPTIONS_GET_PRIVATE(scan_options);
-  auto arrow_schema = priv->scan_options->dataset_schema;
-  return garrow_schema_new_raw(&arrow_schema);
-}
-
-/* arrow::dataset::ScanTask */
-
-typedef struct GADScanTaskPrivate_ {
-  std::shared_ptr<arrow::dataset::ScanTask> scan_task;
-  GADScanOptions *options;
-  GADFragment *fragment;
-} GADScanTaskPrivate;
-
-enum {
-  PROP_SCAN_TASK = 1,
-  PROP_OPTIONS,
-  PROP_FRAGMENT,
-};
-
-G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE(GADScanTask,
-                                    gad_scan_task,
-                                    G_TYPE_OBJECT)
-
-#define GAD_SCAN_TASK_GET_PRIVATE(obj)          \
-  static_cast<GADScanTaskPrivate *>(            \
-    gad_scan_task_get_instance_private(         \
-      GAD_SCAN_TASK(obj)))
-
-static void
-gad_scan_task_dispose(GObject *object)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(object);
-
-  if (priv->options) {
-    g_object_unref(priv->options);
-    priv->options = NULL;
-  }
-
-  if (priv->fragment) {
-    g_object_unref(priv->fragment);
-    priv->fragment = NULL;
-  }
-
-  G_OBJECT_CLASS(gad_scan_task_parent_class)->dispose(object);
-}
-
-static void
-gad_scan_task_finalize(GObject *object)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(object);
-
-  priv->scan_task.~shared_ptr();
-
-  G_OBJECT_CLASS(gad_scan_task_parent_class)->finalize(object);
-}
-
-static void
-gad_scan_task_set_property(GObject *object,
-                           guint prop_id,
-                           const GValue *value,
-                           GParamSpec *pspec)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_SCAN_TASK:
-    priv->scan_task =
-      *static_cast<std::shared_ptr<arrow::dataset::ScanTask> *>(g_value_get_pointer(value));
-    break;
-  case PROP_OPTIONS:
-    priv->options = GAD_SCAN_OPTIONS(g_value_dup_object(value));
-    break;
-  case PROP_FRAGMENT:
-    priv->fragment = GAD_FRAGMENT(g_value_dup_object(value));
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-gad_scan_task_get_property(GObject *object,
-                           guint prop_id,
-                           GValue *value,
-                           GParamSpec *pspec)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_OPTIONS:
-    g_value_set_object(value, priv->options);
-    break;
-  case PROP_FRAGMENT:
-    g_value_set_object(value, priv->fragment);
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-gad_scan_task_init(GADScanTask *object)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(object);
-  new(&priv->scan_task) std::shared_ptr<arrow::dataset::ScanTask>;
-}
-
-static void
-gad_scan_task_class_init(GADScanTaskClass *klass)
-{
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->dispose      = gad_scan_task_dispose;
-  gobject_class->finalize     = gad_scan_task_finalize;
-  gobject_class->set_property = gad_scan_task_set_property;
-  gobject_class->get_property = gad_scan_task_get_property;
-
-  GParamSpec *spec;
-  spec = g_param_spec_pointer("scan-task",
-                              "ScanTask",
-                              "The raw std::shared<arrow::dataset::ScanTask> *",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_SCAN_TASK, spec);
-
-  /**
-   * GADScanTask:options:
-   *
-   * The options of the scan task.
-   *
-   * Since: 1.0.0
-   */
-  spec = g_param_spec_object("options",
-                             "Options",
-                             "The options of the scan task",
-                             GAD_TYPE_SCAN_OPTIONS,
-                             static_cast<GParamFlags>(G_PARAM_READWRITE |
-                                                      G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_OPTIONS, spec);
-
-  /**
-   * GADScanTask:fragment:
-   *
-   * The fragment of the scan task.
-   *
-   * Since: 4.0.0
-   */
-  spec = g_param_spec_object("fragment",
-                             "Fragment",
-                             "The fragment of the scan task",
-                             GAD_TYPE_FRAGMENT,
-                             static_cast<GParamFlags>(G_PARAM_READWRITE |
-                                                      G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_FRAGMENT, spec);
-}
-
-/**
- * gad_scan_task_get_options:
- * @scan_task: A #GADScanTask.
- *
- * Returns: (transfer full): A #GADScanOptions.
- *
- * Since: 1.0.0
- */
-GADScanOptions *
-gad_scan_task_get_options(GADScanTask *scan_task)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(scan_task);
-  if (priv->options) {
-    g_object_ref(priv->options);
-    return priv->options;
-  }
-
-  auto arrow_options = priv->scan_task->options();
-  return gad_scan_options_new_raw(&arrow_options);
-}
-
-/**
- * gad_scan_task_get_fragment:
- * @scan_task: A #GADScanTask.
- *
- * Returns: (transfer full): A #GADFragment.
- *
- * Since: 4.0.0
- */
-GADFragment *
-gad_scan_task_get_fragment(GADScanTask *scan_task)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(scan_task);
-  if (priv->fragment) {
-    g_object_ref(priv->fragment);
-    return priv->fragment;
-  }
-
-  auto arrow_fragment = priv->scan_task->fragment();
-  return gad_fragment_new_raw(&arrow_fragment);
-}
-
-/**
- * gad_scan_task_execute:
- * @scan_task: A #GADScanTask.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (nullable) (transfer full): A newly created #GArrowRecordBatchIterator,
- *   or %NULL on error.
- *
- * Since: 1.0.0
- */
-GArrowRecordBatchIterator *gad_scan_task_execute(GADScanTask *scan_task,
-                                                 GError **error)
-{
-  auto priv = GAD_SCAN_TASK_GET_PRIVATE(scan_task);
-  auto arrow_result = priv->scan_task->Execute();
-  if (garrow::check(error, arrow_result, "[datasets][scan-task][execute]")) {
-    auto arrow_record_batch_iterator = std::move(*arrow_result);
-    return garrow_record_batch_iterator_new_raw(&arrow_record_batch_iterator);
-  } else {
-    return NULL;
-  }
-}
-
-/* arrow::dataset::InMemoryScanTask */
-
-G_DEFINE_TYPE(GADInMemoryScanTask,
-              gad_in_memory_scan_task,
-              GAD_TYPE_SCAN_TASK)
-
-static void
-gad_in_memory_scan_task_init(GADInMemoryScanTask *object)
-{
-}
-
-static void
-gad_in_memory_scan_task_class_init(GADInMemoryScanTaskClass *klass)
-{
-}
-
-/**
- * gad_in_memory_scan_task_new:
- * @record_batches: (array length=n_record_batches):
- *   (element-type GArrowRecordBatch): The record batches of the table.
- * @n_record_batches: The number of record batches.
- * @options: A #GADScanOptions.
- * @fragment: A #GADInMemoryFragment.
- *
- * Returns: A newly created #GADInMemoryScanTask.
- *
- * Since: 1.0.0
- */
-GADInMemoryScanTask *
-gad_in_memory_scan_task_new(GArrowRecordBatch **record_batches,
-                            gsize n_record_batches,
-                            GADScanOptions *options,
-                            GADInMemoryFragment *fragment)
-{
-  std::vector<std::shared_ptr<arrow::RecordBatch>> arrow_record_batches;
-  arrow_record_batches.reserve(n_record_batches);
-  for (gsize i = 0; i < n_record_batches; ++i) {
-    auto arrow_record_batch = garrow_record_batch_get_raw(record_batches[i]);
-    arrow_record_batches.push_back(arrow_record_batch);
-  }
-  auto arrow_options = gad_scan_options_get_raw(options);
-  auto arrow_fragment = gad_fragment_get_raw(GAD_FRAGMENT(fragment));
-  auto arrow_in_memory_scan_task =
-    std::make_shared<arrow::dataset::InMemoryScanTask>(arrow_record_batches,
-                                                       arrow_options,
-                                                       arrow_fragment);
-  return gad_in_memory_scan_task_new_raw(&arrow_in_memory_scan_task,
-                                         options,
-                                         fragment);
-}
-
-G_END_DECLS
-
-GADScanOptions *
-gad_scan_options_new_raw(std::shared_ptr<arrow::dataset::ScanOptions> *arrow_scan_options)
-{
-  auto scan_options =
-    GAD_SCAN_OPTIONS(g_object_new(GAD_TYPE_SCAN_OPTIONS,
-                                  "scan-options", arrow_scan_options,
-                                  NULL));
-  return scan_options;
-}
-
-std::shared_ptr<arrow::dataset::ScanOptions>
-gad_scan_options_get_raw(GADScanOptions *scan_options)
-{
-  auto priv = GAD_SCAN_OPTIONS_GET_PRIVATE(scan_options);
-  return priv->scan_options;
-}
-
-GADInMemoryScanTask *
-gad_in_memory_scan_task_new_raw(std::shared_ptr<arrow::dataset::InMemoryScanTask> *arrow_in_memory_scan_task,
-                                GADScanOptions *options,
-                                GADInMemoryFragment *fragment)
-{
-  auto in_memory_scan_task =
-    GAD_IN_MEMORY_SCAN_TASK(g_object_new(GAD_TYPE_IN_MEMORY_SCAN_TASK,
-                                         "scan-task", arrow_in_memory_scan_task,
-                                         "options", options,
-                                         "fragment", fragment,
-                                         NULL));
-  return in_memory_scan_task;
-}
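Putting the removed scanner pieces together: a GADScanOptions is built from a
schema, combined with record batches and an in-memory fragment into a
GADInMemoryScanTask, and executed to get a record batch iterator. A rough
sketch (error handling trimmed; batch construction assumed as in the earlier
fragment sketch):

    #include <arrow-dataset-glib/arrow-dataset-glib.h>

    /* Hypothetical helper from the earlier sketch. */
    extern GArrowRecordBatch *build_example_batch(GArrowSchema *schema);

    static void
    scan_once(GArrowSchema *schema)
    {
      GError *error = NULL;
      GArrowRecordBatch *batches[1];
      batches[0] = build_example_batch(schema);

      GADScanOptions *options = gad_scan_options_new(schema);
      GADInMemoryFragment *fragment =
        gad_in_memory_fragment_new(schema, batches, 1);
      GADInMemoryScanTask *task =
        gad_in_memory_scan_task_new(batches, 1, options, fragment);

      /* Execute() yields an iterator over the scanned record batches. */
      GArrowRecordBatchIterator *iterator =
        gad_scan_task_execute(GAD_SCAN_TASK(task), &error);
      if (!iterator) {
        g_printerr("scan failed: %s\n", error->message);
        g_clear_error(&error);
      } else {
        guint n = 0;
        GArrowRecordBatch *batch;
        while ((batch = garrow_record_batch_iterator_next(iterator, &error))) {
          ++n;
          g_object_unref(batch);
        }
        g_print("scanned %u batch(es)\n", n);
        g_object_unref(iterator);
      }

      g_object_unref(task);
      g_object_unref(fragment);
      g_object_unref(options);
      g_object_unref(batches[0]);
    }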
diff --git a/c_glib/arrow-dataset-glib/scanner.h b/c_glib/arrow-dataset-glib/scanner.h
deleted file mode 100644
index f387e89..0000000
--- a/c_glib/arrow-dataset-glib/scanner.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/arrow-glib.h>
-
-#include <arrow-dataset-glib/fragment.h>
-
-G_BEGIN_DECLS
-
-/* arrow::dataset::ScanOptions */
-
-#define GAD_TYPE_SCAN_OPTIONS (gad_scan_options_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADScanOptions,
-                         gad_scan_options,
-                         GAD,
-                         SCAN_OPTIONS,
-                         GObject)
-struct _GADScanOptionsClass
-{
-  GObjectClass parent_class;
-};
-
-
-GARROW_AVAILABLE_IN_1_0
-GADScanOptions *gad_scan_options_new(GArrowSchema *schema);
-GARROW_AVAILABLE_IN_1_0
-GArrowSchema *gad_scan_options_get_schema(GADScanOptions *scan_options);
-
-/* arrow::dataset::ScanTask */
-
-#define GAD_TYPE_SCAN_TASK (gad_scan_task_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADScanTask,
-                         gad_scan_task,
-                         GAD,
-                         SCAN_TASK,
-                         GObject)
-struct _GADScanTaskClass
-{
-  GObjectClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_1_0
-GADScanOptions *gad_scan_task_get_options(GADScanTask *scan_task);
-GARROW_AVAILABLE_IN_4_0
-GADFragment *gad_scan_task_get_fragment(GADScanTask *scan_task);
-GARROW_AVAILABLE_IN_1_0
-GArrowRecordBatchIterator *gad_scan_task_execute(GADScanTask *scan_task,
-                                                 GError **error);
-
-/* arrow::dataset::InMemoryScanTask */
-
-#define GAD_TYPE_IN_MEMORY_SCAN_TASK (gad_in_memory_scan_task_get_type())
-G_DECLARE_DERIVABLE_TYPE(GADInMemoryScanTask,
-                         gad_in_memory_scan_task,
-                         GAD,
-                         IN_MEMORY_SCAN_TASK,
-                         GADScanTask)
-struct _GADInMemoryScanTaskClass
-{
-  GADScanTaskClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_1_0
-GADInMemoryScanTask *
-gad_in_memory_scan_task_new(GArrowRecordBatch **record_batches,
-                            gsize n_record_batches,
-                            GADScanOptions *options,
-                            GADInMemoryFragment *fragment);
-
-G_END_DECLS
diff --git a/c_glib/arrow-dataset-glib/scanner.hpp b/c_glib/arrow-dataset-glib/scanner.hpp
deleted file mode 100644
index f10351e..0000000
--- a/c_glib/arrow-dataset-glib/scanner.hpp
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow/dataset/api.h>
-
-#include <arrow-dataset-glib/fragment.h>
-#include <arrow-dataset-glib/scanner.h>
-
-GADScanOptions *
-gad_scan_options_new_raw(std::shared_ptr<arrow::dataset::ScanOptions> *arrow_scan_options);
-std::shared_ptr<arrow::dataset::ScanOptions>
-gad_scan_options_get_raw(GADScanOptions *scan_options);
-
-GADInMemoryScanTask *
-gad_in_memory_scan_task_new_raw(std::shared_ptr<arrow::dataset::InMemoryScanTask> *arrow_in_memory_scan_task,
-                                GADScanOptions *scan_options,
-                                GADInMemoryFragment *fragment);
diff --git a/c_glib/arrow-glib/array-builder.cpp b/c_glib/arrow-glib/array-builder.cpp
deleted file mode 100644
index c9ac8f5..0000000
--- a/c_glib/arrow-glib/array-builder.cpp
+++ /dev/null
@@ -1,6178 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include <arrow-glib/array-builder.hpp>
-#include <arrow-glib/data-type.hpp>
-#include <arrow-glib/decimal.hpp>
-#include <arrow-glib/error.hpp>
-#include <arrow-glib/type.hpp>
-
-template <typename BUILDER, typename VALUE>
-gboolean
-garrow_array_builder_append_value(GArrowArrayBuilder *builder,
-                                  VALUE value,
-                                  GError **error,
-                                  const gchar *context)
-{
-  auto arrow_builder =
-    static_cast<BUILDER>(garrow_array_builder_get_raw(builder));
-  auto status = arrow_builder->Append(value);
-  return garrow_error_check(error, status, context);
-}
-
-template <typename VALUE, typename APPEND_FUNCTION>
-gboolean
-garrow_array_builder_append_values(VALUE *values,
-                                   gint64 values_length,
-                                   const gboolean *is_valids,
-                                   gint64 is_valids_length,
-                                   GError **error,
-                                   const gchar *context,
-                                   APPEND_FUNCTION append_function)
-{
-  if (is_valids_length > 0) {
-    if (values_length != is_valids_length) {
-      g_set_error(error,
-                  GARROW_ERROR,
-                  GARROW_ERROR_INVALID,
-                  "%s: values length and is_valids length must be equal: "
-                  "<%" G_GINT64_FORMAT "> != "
-                  "<%" G_GINT64_FORMAT ">",
-                  context,
-                  values_length,
-                  is_valids_length);
-      return FALSE;
-    }
-
-    const gint64 chunk_size = 4096;
-    gint64 n_chunks = is_valids_length / chunk_size;
-    gint64 n_remains = is_valids_length % chunk_size;
-    gint64 n_loops = n_chunks;
-    if (n_remains > 0) {
-      ++n_loops;
-    }
-    for (gint64 i = 0; i < n_loops; ++i) {
-      uint8_t valid_bytes[chunk_size];
-      gint64 offset = chunk_size * i;
-      const gboolean *chunked_is_valids = is_valids + offset;
-      gint64 n_values;
-      if (i == n_chunks) {
-        n_values = n_remains;
-      } else {
-        n_values = chunk_size;
-      }
-      for (gint64 j = 0; j < n_values; ++j) {
-        valid_bytes[j] = chunked_is_valids[j];
-      }
-      auto status = append_function(values + offset,
-                                    n_values,
-                                    valid_bytes);
-      if (!garrow_error_check(error, status, context)) {
-        return FALSE;
-      }
-    }
-    return TRUE;
-  } else {
-    auto status = append_function(values, values_length, nullptr);
-    return garrow_error_check(error, status, context);
-  }
-}
-
-template <typename BUILDER, typename VALUE>
-gboolean
-garrow_array_builder_append_values(GArrowArrayBuilder *builder,
-                                   VALUE *values,
-                                   gint64 values_length,
-                                   const gboolean *is_valids,
-                                   gint64 is_valids_length,
-                                   GError **error,
-                                   const gchar *context)
-{
-  auto arrow_builder =
-    static_cast<BUILDER>(garrow_array_builder_get_raw(builder));
-  return garrow_array_builder_append_values(
-    values,
-    values_length,
-    is_valids,
-    is_valids_length,
-    error,
-    context,
-    [&arrow_builder](VALUE *values,
-                     gint64 values_length,
-                     const uint8_t *valid_bytes) -> arrow::Status {
-      return arrow_builder->AppendValues(values, values_length, valid_bytes);
-    });
-}
-
-template <typename BUILDER>
-gboolean
-garrow_array_builder_append_values(GArrowArrayBuilder *builder,
-                                   GBytes **values,
-                                   gint64 values_length,
-                                   const gboolean *is_valids,
-                                   gint64 is_valids_length,
-                                   GError **error,
-                                   const gchar *context)
-{
-  auto arrow_builder =
-    static_cast<BUILDER>(garrow_array_builder_get_raw(builder));
-  arrow::Status status;
-  if (is_valids_length > 0 && values_length != is_valids_length) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "%s: values length and is_valids length must be equal: "
-                "<%" G_GINT64_FORMAT "> != "
-                "<%" G_GINT64_FORMAT ">",
-                context,
-                values_length,
-                is_valids_length);
-    return FALSE;
-  }
-
-  const gint64 chunk_size = 4096;
-  gint64 n_chunks = values_length / chunk_size;
-  gint64 n_remains = values_length % chunk_size;
-  gint64 n_loops = n_chunks;
-  if (n_remains > 0) {
-    ++n_loops;
-  }
-  for (gint64 i = 0; i < n_loops; ++i) {
-    std::vector<std::string> strings;
-    uint8_t *valid_bytes = nullptr;
-    uint8_t valid_bytes_buffer[chunk_size];
-    if (is_valids_length > 0) {
-      valid_bytes = valid_bytes_buffer;
-    }
-    const gint64 offset = chunk_size * i;
-    gint64 n_values;
-    if (i == n_chunks) {
-      n_values = n_remains;
-    } else {
-      n_values = chunk_size;
-    }
-    for (gint64 j = 0; j < n_values; ++j) {
-      auto value = values[offset + j];
-      size_t data_size;
-      auto raw_data = g_bytes_get_data(value, &data_size);
-      strings.push_back(std::string(static_cast<const char *>(raw_data),
-                                    data_size));
-      if (valid_bytes) {
-        valid_bytes_buffer[j] = is_valids[offset + j];
-      }
-    }
-    status = arrow_builder->AppendValues(strings, valid_bytes);
-    if (!garrow_error_check(error, status, context)) {
-      return FALSE;
-    }
-  }
-  return TRUE;
-}
-
-template <typename VALUE, typename GET_VALUE_FUNCTION>
-gboolean
-garrow_array_builder_append_values(
-  GArrowArrayBuilder *builder,
-  VALUE *values,
-  gint64 values_length,
-  const gboolean *is_valids,
-  gint64 is_valids_length,
-  GError **error,
-  const gchar *context,
-  GET_VALUE_FUNCTION get_value_function)
-{
-  auto arrow_builder =
-    static_cast<arrow::FixedSizeBinaryBuilder *>(
-      garrow_array_builder_get_raw(builder));
-  if (is_valids_length > 0 && values_length != is_valids_length) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "%s: values length and is_valids length must be equal: "
-                "<%" G_GINT64_FORMAT "> != "
-                "<%" G_GINT64_FORMAT ">",
-                context,
-                values_length,
-                is_valids_length);
-    return FALSE;
-  }
-
-  auto value_size = arrow_builder->byte_width();
-  const gint64 chunk_size = 4096;
-  gint64 n_chunks = values_length / chunk_size;
-  gint64 n_remains = values_length % chunk_size;
-  gint64 n_loops = n_chunks;
-  if (n_remains > 0) {
-    ++n_loops;
-  }
-  for (gint64 i = 0; i < n_loops; ++i) {
-    uint8_t data[value_size * chunk_size];
-    uint8_t *valid_bytes = nullptr;
-    uint8_t valid_bytes_buffer[chunk_size];
-    if (is_valids_length > 0) {
-      valid_bytes = valid_bytes_buffer;
-    }
-    const gint64 offset = chunk_size * i;
-    gint64 n_values;
-    if (i == n_chunks) {
-      n_values = n_remains;
-    } else {
-      n_values = chunk_size;
-    }
-    for (gint64 j = 0; j < n_values; ++j) {
-      bool is_valid = true;
-      if (is_valids) {
-        is_valid = is_valids[offset + j];
-      }
-      VALUE value = nullptr;
-      if (is_valid) {
-        value = values[offset + j];
-      }
-      if (value) {
-        get_value_function(data + (value_size * j),
-                           value,
-                           value_size);
-      } else {
-        is_valid = false;
-        if (!valid_bytes) {
-          valid_bytes = valid_bytes_buffer;
-          memset(valid_bytes_buffer, true, j);
-        }
-      }
-      if (valid_bytes) {
-        valid_bytes_buffer[j] = is_valid;
-      }
-    }
-    auto status = arrow_builder->AppendValues(data, n_values, valid_bytes);
-    if (!garrow_error_check(error, status, context)) {
-      return FALSE;
-    }
-  }
-  return TRUE;
-}
-
-template <typename BUILDER>
-gboolean
-garrow_array_builder_append_values(GArrowArrayBuilder *builder,
-                                   GBytes *values,
-                                   const gboolean *is_valids,
-                                   gint64 is_valids_length,
-                                   GError **error,
-                                   const gchar *context)
-{
-  auto arrow_builder =
-    static_cast<BUILDER>(garrow_array_builder_get_raw(builder));
-  auto value_size = arrow_builder->byte_width();
-  gsize raw_values_size;
-  auto raw_values =
-    static_cast<const uint8_t *>(g_bytes_get_data(values, &raw_values_size));
-  const gint64 n_values = raw_values_size / value_size;
-  if (is_valids_length > 0 && n_values != is_valids_length) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "%s: the number of values and is_valids length must be equal: "
-                "<%" G_GINT64_FORMAT "> != "
-                "<%" G_GINT64_FORMAT ">",
-                context,
-                n_values,
-                is_valids_length);
-    return FALSE;
-  }
-
-  if (is_valids_length == 0) {
-    auto status = arrow_builder->AppendValues(raw_values, n_values);
-    if (!garrow_error_check(error, status, context)) {
-      return FALSE;
-    }
-    return TRUE;
-  }
-
-  const gint64 chunk_size = 4096;
-  gint64 n_chunks = n_values / chunk_size;
-  gint64 n_remains = n_values % chunk_size;
-  gint64 n_loops = n_chunks;
-  if (n_remains > 0) {
-    ++n_loops;
-  }
-  for (gint64 i = 0; i < n_loops; ++i) {
-    uint8_t valid_bytes[chunk_size];
-    const auto offset = chunk_size * i;
-    gint64 n_values;
-    if (i == n_chunks) {
-      n_values = n_remains;
-    } else {
-      n_values = chunk_size;
-    }
-    for (gint64 j = 0; j < n_values; ++j) {
-      valid_bytes[j] = is_valids[offset + j];
-    }
-    auto status = arrow_builder->AppendValues(raw_values + (value_size * offset),
-                                              n_values,
-                                              valid_bytes);
-    if (!garrow_error_check(error, status, context)) {
-      return FALSE;
-    }
-  }
-  return TRUE;
-}
-
-
-G_BEGIN_DECLS
-
-/**
- * SECTION: array-builder
- * @section_id: array-builder-classes
- * @title: Array builder classes
- * @include: arrow-glib/arrow-glib.h
- *
- * #GArrowArrayBuilder is a base class for all array builder classes
- * such as #GArrowBooleanArrayBuilder.
- *
- * You need to use an array builder class to create a new array.
- *
- * #GArrowNullArrayBuilder is the class to create a new
- * #GArrowNullArray.
- *
- * #GArrowBooleanArrayBuilder is the class to create a new
- * #GArrowBooleanArray.
- *
- * #GArrowIntArrayBuilder is the class to create a new integer
- * array. Integer size is automatically chosen. It's recommended that
- * you use this builder instead of a specific integer size builder such
- * as #GArrowInt8ArrayBuilder.
- *
- * #GArrowUIntArrayBuilder is the class to create a new unsigned
- * integer array. Unsigned integer size is automatically chosen. It's
- * recommended that you use this builder instead of a specific unsigned
- * integer size builder such as #GArrowUInt8ArrayBuilder.
- *
- * #GArrowInt8ArrayBuilder is the class to create a new
- * #GArrowInt8Array.
- *
- * #GArrowUInt8ArrayBuilder is the class to create a new
- * #GArrowUInt8Array.
- *
- * #GArrowInt16ArrayBuilder is the class to create a new
- * #GArrowInt16Array.
- *
- * #GArrowUInt16ArrayBuilder is the class to create a new
- * #GArrowUInt16Array.
- *
- * #GArrowInt32ArrayBuilder is the class to create a new
- * #GArrowInt32Array.
- *
- * #GArrowUInt32ArrayBuilder is the class to create a new
- * #GArrowUInt32Array.
- *
- * #GArrowInt64ArrayBuilder is the class to create a new
- * #GArrowInt64Array.
- *
- * #GArrowUInt64ArrayBuilder is the class to create a new
- * #GArrowUInt64Array.
- *
- * #GArrowFloatArrayBuilder is the class to create a new
- * #GArrowFloatArray.
- *
- * #GArrowDoubleArrayBuilder is the class to create a new
- * #GArrowDoubleArray.
- *
- * #GArrowBinaryArrayBuilder is the class to create a new
- * #GArrowBinaryArray.
- *
- * #GArrowLargeBinaryArrayBuilder is the class to create a new
- * #GArrowLargeBinaryArray.
- *
- * #GArrowStringArrayBuilder is the class to create a new
- * #GArrowStringArray.
- *
- * #GArrowLargeStringArrayBuilder is the class to create a new
- * #GArrowLargeStringArray.
- *
- * #GArrowFixedSizeBinaryArrayBuilder is the class to create a new
- * #GArrowFixedSizeBinaryArray.
- *
- * #GArrowDate32ArrayBuilder is the class to create a new
- * #GArrowDate32Array.
- *
- * #GArrowDate64ArrayBuilder is the class to create a new
- * #GArrowDate64Array.
- *
- * #GArrowTimestampArrayBuilder is the class to create a new
- * #GArrowTimestampArray.
- *
- * #GArrowTime32ArrayBuilder is the class to create a new
- * #GArrowTime32Array.
- *
- * #GArrowTime64ArrayBuilder is the class to create a new
- * #GArrowTime64Array.
- *
- * #GArrowStringDictionaryArrayBuilder is the class to create a new
- * #GArrowDictionaryArray with a dictionary array of #GArrowStringArray.
- *
- * #GArrowListArrayBuilder is the class to create a new
- * #GArrowListArray.
- *
- * #GArrowLargeListArrayBuilder is the class to create a new
- * #GArrowLargeListArray.
- *
- * #GArrowStructArrayBuilder is the class to create a new
- * #GArrowStructArray.
- *
- * #GArrowMapArrayBuilder is the class to create a new
- * #GArrowMapArray.
- *
- * #GArrowDecimal128ArrayBuilder is the class to create a new
- * #GArrowDecimal128Array.
- *
- * #GArrowDecimal256ArrayBuilder is the class to create a new
- * #GArrowDecimal256Array.
- */
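As a consumer-side illustration of the builder API documented above, a
minimal C sketch (error handling abbreviated; the chunked AppendValues
path implemented by the templates earlier in this file is what services
the append_values call):

    #include <arrow-glib/arrow-glib.h>

    static GArrowArray *
    build_int32_array(void)
    {
      GError *error = NULL;
      GArrowInt32ArrayBuilder *builder = garrow_int32_array_builder_new();

      /* append_values() takes parallel value/validity arrays; FALSE
         entries become nulls.  Values are forwarded to
         arrow::Int32Builder::AppendValues() in 4096-element chunks. */
      gint32 values[] = {1, 2, 3};
      gboolean is_valids[] = {TRUE, FALSE, TRUE};
      if (!garrow_int32_array_builder_append_values(builder,
                                                    values, 3,
                                                    is_valids, 3,
                                                    &error)) {
        g_printerr("append failed: %s\n", error->message);
        g_clear_error(&error);
      }

      /* finish() resolves to arrow::ArrayBuilder::Finish() and hands
         back the built array. */
      GArrowArray *array =
        garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder), &error);
      g_object_unref(builder);
      return array; /* NULL on error */
    }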
-
-typedef struct GArrowArrayBuilderPrivate_ {
-  arrow::ArrayBuilder *array_builder;
-  gboolean have_ownership;
-} GArrowArrayBuilderPrivate;
-
-enum {
-  PROP_0,
-  PROP_ARRAY_BUILDER
-};
-
-G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE(GArrowArrayBuilder,
-                                    garrow_array_builder,
-                                    G_TYPE_OBJECT)
-
-#define GARROW_ARRAY_BUILDER_GET_PRIVATE(obj)         \
-  static_cast<GArrowArrayBuilderPrivate *>(           \
-     garrow_array_builder_get_instance_private(       \
-       GARROW_ARRAY_BUILDER(obj)))
-
-static void
-garrow_array_builder_finalize(GObject *object)
-{
-  auto priv = GARROW_ARRAY_BUILDER_GET_PRIVATE(object);
-
-  if (priv->have_ownership) {
-    delete priv->array_builder;
-  }
-
-  G_OBJECT_CLASS(garrow_array_builder_parent_class)->finalize(object);
-}
-
-static void
-garrow_array_builder_set_property(GObject *object,
-                                  guint prop_id,
-                                  const GValue *value,
-                                  GParamSpec *pspec)
-{
-  auto priv = GARROW_ARRAY_BUILDER_GET_PRIVATE(object);
-
-  switch (prop_id) {
-  case PROP_ARRAY_BUILDER:
-    priv->array_builder =
-      static_cast<arrow::ArrayBuilder *>(g_value_get_pointer(value));
-    break;
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-garrow_array_builder_get_property(GObject *object,
-                                  guint prop_id,
-                                  GValue *value,
-                                  GParamSpec *pspec)
-{
-  switch (prop_id) {
-  default:
-    G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
-    break;
-  }
-}
-
-static void
-garrow_array_builder_init(GArrowArrayBuilder *builder)
-{
-  auto priv = GARROW_ARRAY_BUILDER_GET_PRIVATE(builder);
-  priv->have_ownership = TRUE;
-}
-
-static void
-garrow_array_builder_class_init(GArrowArrayBuilderClass *klass)
-{
-  GObjectClass *gobject_class;
-  GParamSpec *spec;
-
-  gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->finalize     = garrow_array_builder_finalize;
-  gobject_class->set_property = garrow_array_builder_set_property;
-  gobject_class->get_property = garrow_array_builder_get_property;
-
-  spec = g_param_spec_pointer("array-builder",
-                              "Array builder",
-                              "The raw arrow::ArrayBuilder *",
-                              static_cast<GParamFlags>(G_PARAM_WRITABLE |
-                                                       G_PARAM_CONSTRUCT_ONLY));
-  g_object_class_install_property(gobject_class, PROP_ARRAY_BUILDER, spec);
-}
-
-static GArrowArrayBuilder *
-garrow_array_builder_new(const std::shared_ptr<arrow::DataType> &type,
-                         GError **error,
-                         const char *context)
-{
-  auto memory_pool = arrow::default_memory_pool();
-  std::unique_ptr<arrow::ArrayBuilder> arrow_builder;
-  auto status = arrow::MakeBuilder(memory_pool, type, &arrow_builder);
-  if (!garrow_error_check(error, status, context)) {
-    return NULL;
-  }
-  return garrow_array_builder_new_raw(arrow_builder.release());
-}
-
-/**
- * garrow_array_builder_release_ownership: (skip)
- * @builder: A #GArrowArrayBuilder.
- *
- * Release ownership of `arrow::ArrayBuilder` in `builder`.
- *
- * Since: 0.8.0
- */
-void
-garrow_array_builder_release_ownership(GArrowArrayBuilder *builder)
-{
-  auto priv = GARROW_ARRAY_BUILDER_GET_PRIVATE(builder);
-  priv->have_ownership = FALSE;
-}
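
A minimal sketch of what releasing ownership means for the caller
(garrow_array_builder_get_raw() is declared in this file's companion
.hpp, outside this excerpt):

  auto builder = GARROW_ARRAY_BUILDER(garrow_boolean_array_builder_new());
  auto arrow_builder = garrow_array_builder_get_raw(builder);
  garrow_array_builder_release_ownership(builder);
  g_object_unref(builder);  // finalize() no longer deletes the raw builder...
  delete arrow_builder;     // ...so the caller must.
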
-
-/**
- * garrow_array_builder_get_value_data_type:
- * @builder: A #GArrowArrayBuilder.
- *
- * Returns: (transfer full): The #GArrowDataType of the value of
- *   the array builder.
- *
- * Since: 0.9.0
- */
-GArrowDataType *
-garrow_array_builder_get_value_data_type(GArrowArrayBuilder *builder)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto arrow_type = arrow_builder->type();
-  return garrow_data_type_new_raw(&arrow_type);
-}
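
For example (garrow_data_type_to_string() is assumed here from
arrow-glib's data type API, which is outside this excerpt):

  auto builder = GARROW_ARRAY_BUILDER(garrow_int8_array_builder_new());
  auto data_type = garrow_array_builder_get_value_data_type(builder);
  auto name = garrow_data_type_to_string(data_type);
  g_print("builder value type: %s\n", name);  // expected: int8
  g_free(name);
  g_object_unref(data_type);  // the return value is (transfer full)
  g_object_unref(builder);
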
-
-/**
- * garrow_array_builder_get_value_type:
- * @builder: A #GArrowArrayBuilder.
- *
- * Returns: The #GArrowType of the value of the array builder.
- *
- * Since: 0.9.0
- */
-GArrowType
-garrow_array_builder_get_value_type(GArrowArrayBuilder *builder)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto arrow_type = arrow_builder->type();
-  return garrow_type_from_raw(arrow_type->id());
-}
-
-/**
- * garrow_array_builder_finish:
- * @builder: A #GArrowArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (transfer full): The built #GArrowArray on success,
- *   %NULL on error.
- */
-GArrowArray *
-garrow_array_builder_finish(GArrowArrayBuilder *builder, GError **error)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  std::shared_ptr<arrow::Array> arrow_array;
-  auto status = arrow_builder->Finish(&arrow_array);
-  if (garrow_error_check(error, status, "[array-builder][finish]")) {
-    return garrow_array_new_raw(&arrow_array);
-  } else {
-    return NULL;
-  }
-}
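
Putting the pieces together, a minimal caller-side sketch of the
append/finish lifecycle, using only functions defined in this file plus
standard GObject calls:

  GError *error = NULL;
  auto builder = garrow_int32_array_builder_new();
  if (garrow_int32_array_builder_append_value(builder, 1, &error) &&
      garrow_int32_array_builder_append_value(builder, 2, &error)) {
    auto array =
      garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder), &error);
    if (array) {
      g_object_unref(array);  // (transfer full)
    }
  }
  if (error) {
    g_print("builder error: %s\n", error->message);
    g_error_free(error);
  }
  g_object_unref(builder);
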
-
-/**
- * garrow_array_builder_reset:
- * @builder: A #GArrowArrayBuilder.
- *
- * Since: 2.0.0
- */
-void
-garrow_array_builder_reset(GArrowArrayBuilder *builder)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  arrow_builder->Reset();
-}
-
-/**
- * garrow_array_builder_get_capacity:
- * @builder: A #GArrowArrayBuilder.
- *
- * Returns: The capacity of the building array.
- *
- * Since: 2.0.0
- */
-gint64
-garrow_array_builder_get_capacity(GArrowArrayBuilder *builder)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  return arrow_builder->capacity();
-}
-
-/**
- * garrow_array_builder_get_length:
- * @builder: A #GArrowArrayBuilder.
- *
- * Returns: The current length of the building array.
- *
- * Since: 2.0.0
- */
-gint64
-garrow_array_builder_get_length(GArrowArrayBuilder *builder)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  return arrow_builder->length();
-}
-
-/**
- * garrow_array_builder_get_n_nulls:
- * @builder: A #GArrowArrayBuilder.
- *
- * Returns: The current number of null elements in the building array.
- *
- * Since: 2.0.0
- */
-gint64
-garrow_array_builder_get_n_nulls(GArrowArrayBuilder *builder)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  return arrow_builder->null_count();
-}
-
-/**
- * garrow_array_builder_resize:
- * @builder: A #GArrowArrayBuilder.
- * @capacity: A new capacity.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_array_builder_resize(GArrowArrayBuilder *builder,
-                            gint64 capacity,
-                            GError **error)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto status = arrow_builder->Resize(capacity);
-  return garrow_error_check(error, status, "[array-builder][resize]");
-}
-
-/**
- * garrow_array_builder_reserve:
- * @builder: A #GArrowArrayBuilder.
- * @additional_capacity: The additional capacity to be reserved.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_array_builder_reserve(GArrowArrayBuilder *builder,
-                             gint64 additional_capacity,
-                             GError **error)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto status = arrow_builder->Reserve(additional_capacity);
-  return garrow_error_check(error, status, "[array-builder][reserve]");
-}
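
For instance, reserving up front before a bulk append and confirming the
effect through garrow_array_builder_get_capacity():

  GError *error = NULL;
  auto builder = GARROW_ARRAY_BUILDER(garrow_int32_array_builder_new());
  if (garrow_array_builder_reserve(builder, 1024, &error)) {
    // Reserve() guarantees room for at least this many more elements.
    g_assert(garrow_array_builder_get_capacity(builder) >= 1024);
  } else {
    g_error_free(error);
  }
  g_object_unref(builder);
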
-
-/**
- * garrow_array_builder_append_null:
- * @builder: A #GArrowArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_array_builder_append_null(GArrowArrayBuilder *builder,
-                                 GError **error)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto status = arrow_builder->AppendNull();
-  return garrow_error_check(error, status, "[array-builder][append-null]");
-}
-
-/**
- * garrow_array_builder_append_nulls:
- * @builder: A #GArrowArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * garrow_array_builder_append_null() calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_array_builder_append_nulls(GArrowArrayBuilder *builder,
-                                  gint64 n,
-                                  GError **error)
-{
-  const gchar *context = "[array-builder][append-nulls]";
-  if (n < 0) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "%s: the number of nulls must be 0 or larger: "
-                "<%" G_GINT64_FORMAT ">",
-                context,
-                n);
-    return FALSE;
-  }
-  if (n == 0) {
-    return TRUE;
-  }
-
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto status = arrow_builder->AppendNulls(n);
-  return garrow_error_check(error, status, context);
-}
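
A short sketch pairing this with garrow_array_builder_get_n_nulls():

  GError *error = NULL;
  auto builder = GARROW_ARRAY_BUILDER(garrow_int32_array_builder_new());
  if (garrow_array_builder_append_nulls(builder, 3, &error)) {
    g_assert(garrow_array_builder_get_length(builder) == 3);
    g_assert(garrow_array_builder_get_n_nulls(builder) == 3);
  } else {
    g_error_free(error);
  }
  g_object_unref(builder);
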
-
-/**
- * garrow_array_builder_append_empty_value:
- * @builder: A #GArrowArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_array_builder_append_empty_value(GArrowArrayBuilder *builder,
-                                        GError **error)
-{
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto status = arrow_builder->AppendEmptyValue();
-  return garrow_error_check(error,
-                            status,
-                            "[array-builder][append-empty-value]");
-}
-
-/**
- * garrow_array_builder_append_empty_values:
- * @builder: A #GArrowArrayBuilder.
- * @n: The number of empty values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple empty values at once. It's more efficient than multiple
- * garrow_array_builder_append_empty_value() calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_array_builder_append_empty_values(GArrowArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  const gchar *context = "[array-builder][append-empty-values]";
-  if (n < 0) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "%s: the number of empty values must be 0 or larger: "
-                "<%" G_GINT64_FORMAT ">",
-                context,
-                n);
-    return FALSE;
-  }
-  if (n == 0) {
-    return TRUE;
-  }
-
-  auto arrow_builder = garrow_array_builder_get_raw(builder);
-  auto status = arrow_builder->AppendEmptyValues(n);
-  return garrow_error_check(error, status, context);
-}
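
Unlike nulls, empty values are valid elements holding the type's
zero/empty value, so they count toward the length but not the null
count; a sketch:

  GError *error = NULL;
  auto builder = GARROW_ARRAY_BUILDER(garrow_int32_array_builder_new());
  if (garrow_array_builder_append_empty_values(builder, 2, &error)) {
    g_assert(garrow_array_builder_get_length(builder) == 2);
    g_assert(garrow_array_builder_get_n_nulls(builder) == 0);
  } else {
    g_error_free(error);
  }
  g_object_unref(builder);
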
-
-
-G_DEFINE_TYPE(GArrowNullArrayBuilder,
-              garrow_null_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_null_array_builder_init(GArrowNullArrayBuilder *builder)
-{
-}
-
-static void
-garrow_null_array_builder_class_init(GArrowNullArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_null_array_builder_new:
- *
- * Returns: A newly created #GArrowNullArrayBuilder.
- *
- * Since: 0.13.0
- */
-GArrowNullArrayBuilder *
-garrow_null_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::null(),
-                                          NULL,
-                                          "[null-array-builder][new]");
-  return GARROW_NULL_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_null_array_builder_append_null: (skip)
- * @builder: A #GArrowNullArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.13.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_null_array_builder_append_null(GArrowNullArrayBuilder *builder,
-                                      GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_null_array_builder_append_nulls: (skip)
- * @builder: A #GArrowNullArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.13.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_null_array_builder_append_nulls(GArrowNullArrayBuilder *builder,
-                                       gint64 n,
-                                       GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowBooleanArrayBuilder,
-              garrow_boolean_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_boolean_array_builder_init(GArrowBooleanArrayBuilder *builder)
-{
-}
-
-static void
-garrow_boolean_array_builder_class_init(GArrowBooleanArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_boolean_array_builder_new:
- *
- * Returns: A newly created #GArrowBooleanArrayBuilder.
- */
-GArrowBooleanArrayBuilder *
-garrow_boolean_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::boolean(),
-                                          NULL,
-                                          "[boolean-array-builder][new]");
-  return GARROW_BOOLEAN_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_boolean_array_builder_append:
- * @builder: A #GArrowBooleanArrayBuilder.
- * @value: A boolean value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_boolean_array_builder_append_value() instead.
- */
-gboolean
-garrow_boolean_array_builder_append(GArrowBooleanArrayBuilder *builder,
-                                    gboolean value,
-                                    GError **error)
-{
-  return garrow_boolean_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_boolean_array_builder_append_value:
- * @builder: A #GArrowBooleanArrayBuilder.
- * @value: A boolean value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_boolean_array_builder_append_value(GArrowBooleanArrayBuilder *builder,
-                                          gboolean value,
-                                          GError **error)
-{
-  return garrow_array_builder_append_value<arrow::BooleanBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     static_cast<bool>(value),
-     error,
-     "[boolean-array-builder][append-value]");
-}
-
-/**
- * garrow_boolean_array_builder_append_values:
- * @builder: A #GArrowBooleanArrayBuilder.
- * @values: (array length=values_length): The array of boolean.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_boolean_array_builder_append_values(GArrowBooleanArrayBuilder *builder,
-                                           const gboolean *values,
-                                           gint64 values_length,
-                                           const gboolean *is_valids,
-                                           gint64 is_valids_length,
-                                           GError **error)
-{
-  // Widen each gboolean (an int) to the guint8 representation that
-  // arrow::BooleanBuilder expects; std::vector avoids the non-standard
-  // variable-length array.
-  std::vector<guint8> arrow_values(values_length);
-  for (gint64 i = 0; i < values_length; ++i) {
-    arrow_values[i] = values[i];
-  }
-  return garrow_array_builder_append_values<arrow::BooleanBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     arrow_values.data(),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[boolean-array-builder][append-values]");
-}
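
A sketch of the values/is_valids pairing, where the mask turns the
middle slot into a null:

  GError *error = NULL;
  auto builder = garrow_boolean_array_builder_new();
  gboolean values[] = {TRUE, FALSE, TRUE};
  gboolean is_valids[] = {TRUE, FALSE, TRUE};  // slot 1 becomes null
  if (!garrow_boolean_array_builder_append_values(builder,
                                                  values, 3,
                                                  is_valids, 3,
                                                  &error)) {
    g_error_free(error);
  }
  g_object_unref(builder);
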
-
-/**
- * garrow_boolean_array_builder_append_null: (skip)
- * @builder: A #GArrowBooleanArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_boolean_array_builder_append_null(GArrowBooleanArrayBuilder *builder,
-                                         GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_boolean_array_builder_append_nulls: (skip)
- * @builder: A #GArrowBooleanArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_boolean_array_builder_append_nulls(GArrowBooleanArrayBuilder *builder,
-                                          gint64 n,
-                                          GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowIntArrayBuilder,
-              garrow_int_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_int_array_builder_init(GArrowIntArrayBuilder *builder)
-{
-}
-
-static void
-garrow_int_array_builder_class_init(GArrowIntArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_int_array_builder_new:
- *
- * Returns: A newly created #GArrowIntArrayBuilder.
- *
- * Since: 0.6.0
- */
-GArrowIntArrayBuilder *
-garrow_int_array_builder_new(void)
-{
-  auto memory_pool = arrow::default_memory_pool();
-  auto arrow_builder = new arrow::AdaptiveIntBuilder(memory_pool);
-  auto builder = garrow_array_builder_new_raw(arrow_builder,
-                                              GARROW_TYPE_INT_ARRAY_BUILDER);
-  return GARROW_INT_ARRAY_BUILDER(builder);
-}
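
Unlike the fixed-width builders elsewhere in this file,
arrow::AdaptiveIntBuilder starts at the narrowest integer width and
widens as appended values require, so the finished array's type depends
on the data. A sketch, assuming the widening behavior of the underlying
C++ builder:

  GError *error = NULL;
  auto builder = garrow_int_array_builder_new();
  if (garrow_int_array_builder_append_value(builder, 1, &error) &&
      garrow_int_array_builder_append_value(builder,
                                            G_GINT64_CONSTANT(1) << 40,
                                            &error)) {
    auto array =
      garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder), &error);
    if (array) {
      g_object_unref(array);  // int64-typed once the large value arrived
    }
  }
  if (error) {
    g_error_free(error);
  }
  g_object_unref(builder);
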
-
-/**
- * garrow_int_array_builder_append:
- * @builder: A #GArrowIntArrayBuilder.
- * @value: An int value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.6.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_int_array_builder_append_value() instead.
- */
-gboolean
-garrow_int_array_builder_append(GArrowIntArrayBuilder *builder,
-                                gint64 value,
-                                GError **error)
-{
-  return garrow_int_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_int_array_builder_append_value:
- * @builder: A #GArrowIntArrayBuilder.
- * @value: An int value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_int_array_builder_append_value(GArrowIntArrayBuilder *builder,
-                                      gint64 value,
-                                      GError **error)
-{
-  return garrow_array_builder_append_value<arrow::AdaptiveIntBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[int-array-builder][append-value]");
-}
-
-/**
- * garrow_int_array_builder_append_values:
- * @builder: A #GArrowIntArrayBuilder.
- * @values: (array length=values_length): The array of int.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_int_array_builder_append_values(GArrowIntArrayBuilder *builder,
-                                       const gint64 *values,
-                                       gint64 values_length,
-                                       const gboolean *is_valids,
-                                       gint64 is_valids_length,
-                                       GError **error)
-{
-  return garrow_array_builder_append_values<arrow::AdaptiveIntBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const int64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[int-array-builder][append-values]");
-}
-
-/**
- * garrow_int_array_builder_append_null: (skip)
- * @builder: A #GArrowIntArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.6.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_int_array_builder_append_null(GArrowIntArrayBuilder *builder,
-                                     GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_int_array_builder_append_nulls: (skip)
- * @builder: A #GArrowIntArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_int_array_builder_append_nulls(GArrowIntArrayBuilder *builder,
-                                      gint64 n,
-                                      GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowUIntArrayBuilder,
-              garrow_uint_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_uint_array_builder_init(GArrowUIntArrayBuilder *builder)
-{
-}
-
-static void
-garrow_uint_array_builder_class_init(GArrowUIntArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_uint_array_builder_new:
- *
- * Returns: A newly created #GArrowUIntArrayBuilder.
- *
- * Since: 0.8.0
- */
-GArrowUIntArrayBuilder *
-garrow_uint_array_builder_new(void)
-{
-  auto memory_pool = arrow::default_memory_pool();
-  auto arrow_builder = new arrow::AdaptiveUIntBuilder(memory_pool);
-  auto builder = garrow_array_builder_new_raw(arrow_builder,
-                                              GARROW_TYPE_UINT_ARRAY_BUILDER);
-  return GARROW_UINT_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_uint_array_builder_append:
- * @builder: A #GArrowUIntArrayBuilder.
- * @value: An unsigned int value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_uint_array_builder_append_value() instead.
- */
-gboolean
-garrow_uint_array_builder_append(GArrowUIntArrayBuilder *builder,
-                                 guint64 value,
-                                 GError **error)
-{
-  return garrow_uint_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_uint_array_builder_append_value:
- * @builder: A #GArrowUIntArrayBuilder.
- * @value: An unsigned int value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_uint_array_builder_append_value(GArrowUIntArrayBuilder *builder,
-                                       guint64 value,
-                                       GError **error)
-{
-  return garrow_array_builder_append_value<arrow::AdaptiveUIntBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[uint-array-builder][append-value]");
-}
-
-/**
- * garrow_uint_array_builder_append_values:
- * @builder: A #GArrowUIntArrayBuilder.
- * @values: (array length=values_length): The array of unsigned int.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_uint_array_builder_append_values(GArrowUIntArrayBuilder *builder,
-                                        const guint64 *values,
-                                        gint64 values_length,
-                                        const gboolean *is_valids,
-                                        gint64 is_valids_length,
-                                        GError **error)
-{
-  return garrow_array_builder_append_values<arrow::AdaptiveUIntBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const uint64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[uint-array-builder][append-values]");
-}
-
-/**
- * garrow_uint_array_builder_append_null: (skip)
- * @builder: A #GArrowUIntArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_uint_array_builder_append_null(GArrowUIntArrayBuilder *builder,
-                                      GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_uint_array_builder_append_nulls: (skip)
- * @builder: A #GArrowUIntArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_uint_array_builder_append_nulls(GArrowUIntArrayBuilder *builder,
-                                       gint64 n,
-                                       GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowInt8ArrayBuilder,
-              garrow_int8_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_int8_array_builder_init(GArrowInt8ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_int8_array_builder_class_init(GArrowInt8ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_int8_array_builder_new:
- *
- * Returns: A newly created #GArrowInt8ArrayBuilder.
- */
-GArrowInt8ArrayBuilder *
-garrow_int8_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::int8(),
-                                          NULL,
-                                          "[int8-array-builder][new]");
-  return GARROW_INT8_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_int8_array_builder_append:
- * @builder: A #GArrowInt8ArrayBuilder.
- * @value: An int8 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_int8_array_builder_append_value() instead.
- */
-gboolean
-garrow_int8_array_builder_append(GArrowInt8ArrayBuilder *builder,
-                                 gint8 value,
-                                 GError **error)
-{
-  return garrow_int8_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_int8_array_builder_append_value:
- * @builder: A #GArrowInt8ArrayBuilder.
- * @value: An int8 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_int8_array_builder_append_value(GArrowInt8ArrayBuilder *builder,
-                                       gint8 value,
-                                       GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Int8Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[int8-array-builder][append-value]");
-}
-
-/**
- * garrow_int8_array_builder_append_values:
- * @builder: A #GArrowInt8ArrayBuilder.
- * @values: (array length=values_length): The array of int8.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_int8_array_builder_append_values(GArrowInt8ArrayBuilder *builder,
-                                        const gint8 *values,
-                                        gint64 values_length,
-                                        const gboolean *is_valids,
-                                        gint64 is_valids_length,
-                                        GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Int8Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[int8-array-builder][append-values]");
-}
-
-/**
- * garrow_int8_array_builder_append_null: (skip)
- * @builder: A #GArrowInt8ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_int8_array_builder_append_null(GArrowInt8ArrayBuilder *builder,
-                                      GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_int8_array_builder_append_nulls: (skip)
- * @builder: A #GArrowInt8ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_int8_array_builder_append_nulls(GArrowInt8ArrayBuilder *builder,
-                                       gint64 n,
-                                       GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowUInt8ArrayBuilder,
-              garrow_uint8_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_uint8_array_builder_init(GArrowUInt8ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_uint8_array_builder_class_init(GArrowUInt8ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_uint8_array_builder_new:
- *
- * Returns: A newly created #GArrowUInt8ArrayBuilder.
- */
-GArrowUInt8ArrayBuilder *
-garrow_uint8_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::uint8(),
-                                          NULL,
-                                          "[uint8-array-builder][new]");
-  return GARROW_UINT8_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_uint8_array_builder_append:
- * @builder: A #GArrowUInt8ArrayBuilder.
- * @value: An uint8 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_uint8_array_builder_append_value() instead.
- */
-gboolean
-garrow_uint8_array_builder_append(GArrowUInt8ArrayBuilder *builder,
-                                  guint8 value,
-                                  GError **error)
-{
-  return garrow_uint8_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_uint8_array_builder_append_value:
- * @builder: A #GArrowUInt8ArrayBuilder.
- * @value: An uint8 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_uint8_array_builder_append_value(GArrowUInt8ArrayBuilder *builder,
-                                        guint8 value,
-                                        GError **error)
-{
-  return garrow_array_builder_append_value<arrow::UInt8Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[uint8-array-builder][append-value]");
-}
-
-/**
- * garrow_uint8_array_builder_append_values:
- * @builder: A #GArrowUInt8ArrayBuilder.
- * @values: (array length=values_length): The array of uint8.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_uint8_array_builder_append_values(GArrowUInt8ArrayBuilder *builder,
-                                         const guint8 *values,
-                                         gint64 values_length,
-                                         const gboolean *is_valids,
-                                         gint64 is_valids_length,
-                                         GError **error)
-{
-  return garrow_array_builder_append_values<arrow::UInt8Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[uint8-array-builder][append-values]");
-}
-
-/**
- * garrow_uint8_array_builder_append_null: (skip)
- * @builder: A #GArrowUInt8ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_uint8_array_builder_append_null(GArrowUInt8ArrayBuilder *builder,
-                                       GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_uint8_array_builder_append_nulls: (skip)
- * @builder: A #GArrowUInt8ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_uint8_array_builder_append_nulls(GArrowUInt8ArrayBuilder *builder,
-                                        gint64 n,
-                                        GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowInt16ArrayBuilder,
-              garrow_int16_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_int16_array_builder_init(GArrowInt16ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_int16_array_builder_class_init(GArrowInt16ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_int16_array_builder_new:
- *
- * Returns: A newly created #GArrowInt16ArrayBuilder.
- */
-GArrowInt16ArrayBuilder *
-garrow_int16_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::int16(),
-                                          NULL,
-                                          "[int16-array-builder][new]");
-  return GARROW_INT16_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_int16_array_builder_append:
- * @builder: A #GArrowInt16ArrayBuilder.
- * @value: An int16 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_int16_array_builder_append_value() instead.
- */
-gboolean
-garrow_int16_array_builder_append(GArrowInt16ArrayBuilder *builder,
-                                  gint16 value,
-                                  GError **error)
-{
-  return garrow_int16_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_int16_array_builder_append_value:
- * @builder: A #GArrowInt16ArrayBuilder.
- * @value: An int16 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_int16_array_builder_append_value(GArrowInt16ArrayBuilder *builder,
-                                        gint16 value,
-                                        GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Int16Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[int16-array-builder][append-value]");
-}
-
-/**
- * garrow_int16_array_builder_append_values:
- * @builder: A #GArrowInt16ArrayBuilder.
- * @values: (array length=values_length): The array of int16.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_int16_array_builder_append_values(GArrowInt16ArrayBuilder *builder,
-                                         const gint16 *values,
-                                         gint64 values_length,
-                                         const gboolean *is_valids,
-                                         gint64 is_valids_length,
-                                         GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Int16Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[int16-array-builder][append-values]");
-}
-
-/**
- * garrow_int16_array_builder_append_null: (skip)
- * @builder: A #GArrowInt16ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_int16_array_builder_append_null(GArrowInt16ArrayBuilder *builder,
-                                       GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_int16_array_builder_append_nulls: (skip)
- * @builder: A #GArrowInt16ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_int16_array_builder_append_nulls(GArrowInt16ArrayBuilder *builder,
-                                        gint64 n,
-                                        GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowUInt16ArrayBuilder,
-              garrow_uint16_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_uint16_array_builder_init(GArrowUInt16ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_uint16_array_builder_class_init(GArrowUInt16ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_uint16_array_builder_new:
- *
- * Returns: A newly created #GArrowUInt16ArrayBuilder.
- */
-GArrowUInt16ArrayBuilder *
-garrow_uint16_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::uint16(),
-                                          NULL,
-                                          "[uint16-array-builder][new]");
-  return GARROW_UINT16_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_uint16_array_builder_append:
- * @builder: A #GArrowUInt16ArrayBuilder.
- * @value: An uint16 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_uint16_array_builder_append_value() instead.
- */
-gboolean
-garrow_uint16_array_builder_append(GArrowUInt16ArrayBuilder *builder,
-                                   guint16 value,
-                                   GError **error)
-{
-  return garrow_uint16_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_uint16_array_builder_append_value:
- * @builder: A #GArrowUInt16ArrayBuilder.
- * @value: An uint16 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_uint16_array_builder_append_value(GArrowUInt16ArrayBuilder *builder,
-                                         guint16 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::UInt16Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[uint16-array-builder][append-value]");
-}
-
-/**
- * garrow_uint16_array_builder_append_values:
- * @builder: A #GArrowUInt16ArrayBuilder.
- * @values: (array length=values_length): The array of uint16.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_uint16_array_builder_append_values(GArrowUInt16ArrayBuilder *builder,
-                                          const guint16 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::UInt16Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[uint16-array-builder][append-values]");
-}
-
-/**
- * garrow_uint16_array_builder_append_null: (skip)
- * @builder: A #GArrowUInt16ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_uint16_array_builder_append_null(GArrowUInt16ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_uint16_array_builder_append_nulls: (skip)
- * @builder: A #GArrowUInt16ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_uint16_array_builder_append_nulls(GArrowUInt16ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowInt32ArrayBuilder,
-              garrow_int32_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_int32_array_builder_init(GArrowInt32ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_int32_array_builder_class_init(GArrowInt32ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_int32_array_builder_new:
- *
- * Returns: A newly created #GArrowInt32ArrayBuilder.
- */
-GArrowInt32ArrayBuilder *
-garrow_int32_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::int32(),
-                                          NULL,
-                                          "[int32-array-builder][new]");
-  return GARROW_INT32_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_int32_array_builder_append:
- * @builder: A #GArrowInt32ArrayBuilder.
- * @value: An int32 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_int32_array_builder_append_value() instead.
- */
-gboolean
-garrow_int32_array_builder_append(GArrowInt32ArrayBuilder *builder,
-                                  gint32 value,
-                                  GError **error)
-{
-  return garrow_int32_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_int32_array_builder_append_value:
- * @builder: A #GArrowInt32ArrayBuilder.
- * @value: An int32 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_int32_array_builder_append_value(GArrowInt32ArrayBuilder *builder,
-                                        gint32 value,
-                                        GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Int32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[int32-array-builder][append-value]");
-}
-
-/**
- * garrow_int32_array_builder_append_values:
- * @builder: A #GArrowInt32ArrayBuilder.
- * @values: (array length=values_length): The array of int32.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_int32_array_builder_append_values(GArrowInt32ArrayBuilder *builder,
-                                         const gint32 *values,
-                                         gint64 values_length,
-                                         const gboolean *is_valids,
-                                         gint64 is_valids_length,
-                                         GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Int32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[int32-array-builder][append-values]");
-}
-
-/**
- * garrow_int32_array_builder_append_null: (skip)
- * @builder: A #GArrowInt32ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_int32_array_builder_append_null(GArrowInt32ArrayBuilder *builder,
-                                       GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_int32_array_builder_append_nulls: (skip)
- * @builder: A #GArrowInt32ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_int32_array_builder_append_nulls(GArrowInt32ArrayBuilder *builder,
-                                        gint64 n,
-                                        GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowUInt32ArrayBuilder,
-              garrow_uint32_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_uint32_array_builder_init(GArrowUInt32ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_uint32_array_builder_class_init(GArrowUInt32ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_uint32_array_builder_new:
- *
- * Returns: A newly created #GArrowUInt32ArrayBuilder.
- */
-GArrowUInt32ArrayBuilder *
-garrow_uint32_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::uint32(),
-                                          NULL,
-                                          "[uint32-array-builder][new]");
-  return GARROW_UINT32_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_uint32_array_builder_append:
- * @builder: A #GArrowUInt32ArrayBuilder.
- * @value: An uint32 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_uint32_array_builder_append_value() instead.
- */
-gboolean
-garrow_uint32_array_builder_append(GArrowUInt32ArrayBuilder *builder,
-                                   guint32 value,
-                                   GError **error)
-{
-  return garrow_uint32_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_uint32_array_builder_append_value:
- * @builder: A #GArrowUInt32ArrayBuilder.
- * @value: An uint32 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_uint32_array_builder_append_value(GArrowUInt32ArrayBuilder *builder,
-                                         guint32 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::UInt32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[uint32-array-builder][append-value]");
-}
-
-/**
- * garrow_uint32_array_builder_append_values:
- * @builder: A #GArrowUInt32ArrayBuilder.
- * @values: (array length=values_length): The array of uint32.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_uint32_array_builder_append_values(GArrowUInt32ArrayBuilder *builder,
-                                          const guint32 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::UInt32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[uint32-array-builder][append-values]");
-}
-
-/**
- * garrow_uint32_array_builder_append_null: (skip)
- * @builder: A #GArrowUInt32ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_uint32_array_builder_append_null(GArrowUInt32ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_uint32_array_builder_append_nulls: (skip)
- * @builder: A #GArrowUInt32ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_uint32_array_builder_append_nulls(GArrowUInt32ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowInt64ArrayBuilder,
-              garrow_int64_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_int64_array_builder_init(GArrowInt64ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_int64_array_builder_class_init(GArrowInt64ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_int64_array_builder_new:
- *
- * Returns: A newly created #GArrowInt64ArrayBuilder.
- */
-GArrowInt64ArrayBuilder *
-garrow_int64_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::int64(),
-                                          NULL,
-                                          "[int64-array-builder][new]");
-  return GARROW_INT64_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_int64_array_builder_append:
- * @builder: A #GArrowInt64ArrayBuilder.
- * @value: An int64 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_int64_array_builder_append_value() instead.
- */
-gboolean
-garrow_int64_array_builder_append(GArrowInt64ArrayBuilder *builder,
-                                  gint64 value,
-                                  GError **error)
-{
-  return garrow_int64_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_int64_array_builder_append_value:
- * @builder: A #GArrowInt64ArrayBuilder.
- * @value: An int64 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_int64_array_builder_append_value(GArrowInt64ArrayBuilder *builder,
-                                        gint64 value,
-                                        GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Int64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[int64-array-builder][append-value]");
-}
-
-/**
- * garrow_int64_array_builder_append_values:
- * @builder: A #GArrowInt64ArrayBuilder.
- * @values: (array length=values_length): The array of int64.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_int64_array_builder_append_values(GArrowInt64ArrayBuilder *builder,
-                                         const gint64 *values,
-                                         gint64 values_length,
-                                         const gboolean *is_valids,
-                                         gint64 is_valids_length,
-                                         GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Int64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const int64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[int64-array-builder][append-values]");
-}
-
-/**
- * garrow_int64_array_builder_append_null: (skip)
- * @builder: A #GArrowInt64ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_int64_array_builder_append_null(GArrowInt64ArrayBuilder *builder,
-                                       GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_int64_array_builder_append_nulls: (skip)
- * @builder: A #GArrowInt64ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_int64_array_builder_append_nulls(GArrowInt64ArrayBuilder *builder,
-                                        gint64 n,
-                                        GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
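
(Putting the int64 builder functions above together, a minimal end-to-end
sketch in plain C; garrow_array_builder_finish() is assumed from the generic
builder API, and the sample values are illustrative:)

    #include <arrow-glib/arrow-glib.h>

    int
    main(void)
    {
      GError *error = NULL;
      GArrowInt64ArrayBuilder *builder = garrow_int64_array_builder_new();

      const gint64 values[] = {1, 2, 3};
      const gboolean is_valids[] = {TRUE, FALSE, TRUE}; /* 2nd slot -> null */
      if (!garrow_int64_array_builder_append_values(builder,
                                                    values, 3,
                                                    is_valids, 3,
                                                    &error) ||
          !garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
                                             2, &error)) {
        g_printerr("append failed: %s\n", error->message);
        g_clear_error(&error);
        g_object_unref(builder);
        return 1;
      }

      /* array holds [1, null, 3, null, null] */
      GArrowArray *array =
        garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder), &error);
      if (array)
        g_object_unref(array);
      g_object_unref(builder);
      return 0;
    }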
-
-
-G_DEFINE_TYPE(GArrowUInt64ArrayBuilder,
-              garrow_uint64_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_uint64_array_builder_init(GArrowUInt64ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_uint64_array_builder_class_init(GArrowUInt64ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_uint64_array_builder_new:
- *
- * Returns: A newly created #GArrowUInt64ArrayBuilder.
- */
-GArrowUInt64ArrayBuilder *
-garrow_uint64_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::uint64(),
-                                          NULL,
-                                          "[uint64-array-builder][new]");
-  return GARROW_UINT64_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_uint64_array_builder_append:
- * @builder: A #GArrowUInt64ArrayBuilder.
- * @value: A uint64 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_uint64_array_builder_append_value() instead.
- */
-gboolean
-garrow_uint64_array_builder_append(GArrowUInt64ArrayBuilder *builder,
-                                   guint64 value,
-                                   GError **error)
-{
-  return garrow_uint64_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_uint64_array_builder_append_value:
- * @builder: A #GArrowUInt64ArrayBuilder.
- * @value: A uint64 value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_uint64_array_builder_append_value(GArrowUInt64ArrayBuilder *builder,
-                                         guint64 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::UInt64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[uint64-array-builder][append-value]");
-}
-
-/**
- * garrow_uint64_array_builder_append_values:
- * @builder: A #GArrowUInt64ArrayBuilder.
- * @values: (array length=values_length): The array of uint64.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_uint64_array_builder_append_values(GArrowUInt64ArrayBuilder *builder,
-                                          const guint64 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::UInt64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const uint64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[uint64-array-builder][append-values]");
-}
-
-/**
- * garrow_uint64_array_builder_append_null: (skip)
- * @builder: A #GArrowUInt64ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_uint64_array_builder_append_null(GArrowUInt64ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_uint64_array_builder_append_nulls: (skip)
- * @builder: A #GArrowUInt64ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_uint64_array_builder_append_nulls(GArrowUInt64ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowFloatArrayBuilder,
-              garrow_float_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_float_array_builder_init(GArrowFloatArrayBuilder *builder)
-{
-}
-
-static void
-garrow_float_array_builder_class_init(GArrowFloatArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_float_array_builder_new:
- *
- * Returns: A newly created #GArrowFloatArrayBuilder.
- */
-GArrowFloatArrayBuilder *
-garrow_float_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::float32(),
-                                          NULL,
-                                          "[float-array-builder][new]");
-  return GARROW_FLOAT_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_float_array_builder_append:
- * @builder: A #GArrowFloatArrayBuilder.
- * @value: A float value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_float_array_builder_append_value() instead.
- */
-gboolean
-garrow_float_array_builder_append(GArrowFloatArrayBuilder *builder,
-                                  gfloat value,
-                                  GError **error)
-{
-  return garrow_float_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_float_array_builder_append_value:
- * @builder: A #GArrowFloatArrayBuilder.
- * @value: A float value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_float_array_builder_append_value(GArrowFloatArrayBuilder *builder,
-                                        gfloat value,
-                                        GError **error)
-{
-  return garrow_array_builder_append_value<arrow::FloatBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[float-array-builder][append-value]");
-}
-
-/**
- * garrow_float_array_builder_append_values:
- * @builder: A #GArrowFloatArrayBuilder.
- * @values: (array length=values_length): The array of float.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_float_array_builder_append_values(GArrowFloatArrayBuilder *builder,
-                                         const gfloat *values,
-                                         gint64 values_length,
-                                         const gboolean *is_valids,
-                                         gint64 is_valids_length,
-                                         GError **error)
-{
-  return garrow_array_builder_append_values<arrow::FloatBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[float-array-builder][append-values]");
-}
-
-/**
- * garrow_float_array_builder_append_null: (skip)
- * @builder: A #GArrowFloatArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_float_array_builder_append_null(GArrowFloatArrayBuilder *builder,
-                                       GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_float_array_builder_append_nulls: (skip)
- * @builder: A #GArrowFloatArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_float_array_builder_append_nulls(GArrowFloatArrayBuilder *builder,
-                                        gint64 n,
-                                        GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowDoubleArrayBuilder,
-              garrow_double_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_double_array_builder_init(GArrowDoubleArrayBuilder *builder)
-{
-}
-
-static void
-garrow_double_array_builder_class_init(GArrowDoubleArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_double_array_builder_new:
- *
- * Returns: A newly created #GArrowDoubleArrayBuilder.
- */
-GArrowDoubleArrayBuilder *
-garrow_double_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::float64(),
-                                          NULL,
-                                          "[double-array-builder][new]");
-  return GARROW_DOUBLE_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_double_array_builder_append:
- * @builder: A #GArrowDoubleArrayBuilder.
- * @value: A double value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_double_array_builder_append_value() instead.
- */
-gboolean
-garrow_double_array_builder_append(GArrowDoubleArrayBuilder *builder,
-                                   gdouble value,
-                                   GError **error)
-{
-  return garrow_double_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_double_array_builder_append_value:
- * @builder: A #GArrowDoubleArrayBuilder.
- * @value: A double value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_double_array_builder_append_value(GArrowDoubleArrayBuilder *builder,
-                                         gdouble value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::DoubleBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[double-array-builder][append-value]");
-}
-
-/**
- * garrow_double_array_builder_append_values:
- * @builder: A #GArrowDoubleArrayBuilder.
- * @values: (array length=values_length): The array of double.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_double_array_builder_append_values(GArrowDoubleArrayBuilder *builder,
-                                          const gdouble *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::DoubleBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[double-array-builder][append-values]");
-}
-
-/**
- * garrow_double_array_builder_append_null: (skip)
- * @builder: A #GArrowDoubleArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_double_array_builder_append_null(GArrowDoubleArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_double_array_builder_append_nulls: (skip)
- * @builder: A #GArrowDoubleArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_double_array_builder_append_nulls(GArrowDoubleArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowBinaryArrayBuilder,
-              garrow_binary_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_binary_array_builder_init(GArrowBinaryArrayBuilder *builder)
-{
-}
-
-static void
-garrow_binary_array_builder_class_init(GArrowBinaryArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_binary_array_builder_new:
- *
- * Returns: A newly created #GArrowBinaryArrayBuilder.
- */
-GArrowBinaryArrayBuilder *
-garrow_binary_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::binary(),
-                                          NULL,
-                                          "[binary-array-builder][new]");
-  return GARROW_BINARY_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_binary_array_builder_append:
- * @builder: A #GArrowBinaryArrayBuilder.
- * @value: (array length=length): A binary value.
- * @length: A value length.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_binary_array_builder_append_value() instead.
- */
-gboolean
-garrow_binary_array_builder_append(GArrowBinaryArrayBuilder *builder,
-                                   const guint8 *value,
-                                   gint32 length,
-                                   GError **error)
-{
-  return garrow_binary_array_builder_append_value(builder, value, length, error);
-}
-
-/**
- * garrow_binary_array_builder_append_value:
- * @builder: A #GArrowBinaryArrayBuilder.
- * @value: (array length=length): A binary value.
- * @length: A value length.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_binary_array_builder_append_value(GArrowBinaryArrayBuilder *builder,
-                                         const guint8 *value,
-                                         gint32 length,
-                                         GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append(value, length);
-  return garrow_error_check(error,
-                            status,
-                            "[binary-array-builder][append-value]");
-}
-
-/**
- * garrow_binary_array_builder_append_value_bytes:
- * @builder: A #GArrowBinaryArrayBuilder.
- * @value: A binary value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_binary_array_builder_append_value_bytes(GArrowBinaryArrayBuilder *builder,
-                                               GBytes *value,
-                                               GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  gsize size;
-  auto data = g_bytes_get_data(value, &size);
-  auto status = arrow_builder->Append(static_cast<const uint8_t *>(data),
-                                      size);
-  return garrow_error_check(error,
-                            status,
-                            "[binary-array-builder][append-value-bytes]");
-}
-
-/**
- * garrow_binary_array_builder_append_values:
- * @builder: A #GArrowBinaryArrayBuilder.
- * @values: (array length=values_length): The array of #GBytes.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_binary_array_builder_append_values(GArrowBinaryArrayBuilder *builder,
-                                          GBytes **values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::BinaryBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[binary-array-builder][append-values]");
-}
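
(The two GBytes-based entry points above can be mixed with the raw
pointer/length one; a short plain-C sketch, with illustrative byte strings
and garrow_array_builder_finish() assumed from the generic builder API:)

    #include <arrow-glib/arrow-glib.h>

    static GArrowArray *
    build_binary_example(GError **error)
    {
      GArrowBinaryArrayBuilder *builder = garrow_binary_array_builder_new();

      /* Raw pointer + length... */
      const guint8 raw[] = {0x01, 0x02, 0x03};
      gboolean ok =
        garrow_binary_array_builder_append_value(builder, raw, 3, error);

      /* ...or a GBytes value. */
      GBytes *bytes = g_bytes_new_static("\x04\x05", 2);
      ok = ok &&
        garrow_binary_array_builder_append_value_bytes(builder, bytes, error);
      g_bytes_unref(bytes);

      GArrowArray *array = NULL;
      if (ok)
        array = garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder),
                                            error);
      g_object_unref(builder);
      return array;
    }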
-
-/**
- * garrow_binary_array_builder_append_null: (skip)
- * @builder: A #GArrowBinaryArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_binary_array_builder_append_null(GArrowBinaryArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_binary_array_builder_append_nulls: (skip)
- * @builder: A #GArrowBinaryArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_binary_array_builder_append_nulls(GArrowBinaryArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowLargeBinaryArrayBuilder,
-              garrow_large_binary_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_large_binary_array_builder_init(GArrowLargeBinaryArrayBuilder *builder)
-{
-}
-
-static void
-garrow_large_binary_array_builder_class_init(GArrowLargeBinaryArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_large_binary_array_builder_new:
- *
- * Returns: A newly created #GArrowLargeBinaryArrayBuilder.
- *
- * Since: 0.16.0
- */
-GArrowLargeBinaryArrayBuilder *
-garrow_large_binary_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::large_binary(),
-                                          NULL,
-                                          "[large-binary-array-builder][new]");
-  return GARROW_LARGE_BINARY_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_large_binary_array_builder_append_value:
- * @builder: A #GArrowLargeBinaryArrayBuilder.
- * @value: (array length=length): A binary value.
- * @length: A value length.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_large_binary_array_builder_append_value(GArrowLargeBinaryArrayBuilder *builder,
-                                               const guint8 *value,
-                                               gint64 length,
-                                               GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::LargeBinaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append(value, length);
-  return garrow_error_check(error,
-                            status,
-                            "[large-binary-array-builder][append-value]");
-}
-
-/**
- * garrow_large_binary_array_builder_append_value_bytes:
- * @builder: A #GArrowLargeBinaryArrayBuilder.
- * @value: A binary value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_large_binary_array_builder_append_value_bytes(GArrowLargeBinaryArrayBuilder *builder,
-                                                     GBytes *value,
-                                                     GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::LargeBinaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  gsize size;
-  gconstpointer data = g_bytes_get_data(value, &size);
-  auto status = arrow_builder->Append(static_cast<const uint8_t *>(data),
-                                      size);
-  return garrow_error_check(error,
-                            status,
-                            "[large-binary-array-builder][append-value-bytes]");
-}
-
-/**
- * garrow_large_binary_array_builder_append_values:
- * @builder: A #GArrowLargeBinaryArrayBuilder.
- * @values: (array length=values_length): The array of #GBytes.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_large_binary_array_builder_append_values(GArrowLargeBinaryArrayBuilder *builder,
-                                                GBytes **values,
-                                                gint64 values_length,
-                                                const gboolean *is_valids,
-                                                gint64 is_valids_length,
-                                                GError **error)
-{
-  return garrow_array_builder_append_values<arrow::LargeBinaryBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[large-binary-array-builder][append-values]");
-}
-
-/**
- * garrow_large_binary_array_builder_append_null: (skip)
- * @builder: A #GArrowLargeBinaryArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_large_binary_array_builder_append_null(GArrowLargeBinaryArrayBuilder *builder,
-                                              GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_large_binary_array_builder_append_nulls: (skip)
- * @builder: A #GArrowLargeBinaryArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_large_binary_array_builder_append_nulls(GArrowLargeBinaryArrayBuilder *builder,
-                                               gint64 n,
-                                               GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowStringArrayBuilder,
-              garrow_string_array_builder,
-              GARROW_TYPE_BINARY_ARRAY_BUILDER)
-
-static void
-garrow_string_array_builder_init(GArrowStringArrayBuilder *builder)
-{
-}
-
-static void
-garrow_string_array_builder_class_init(GArrowStringArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_string_array_builder_new:
- *
- * Returns: A newly created #GArrowStringArrayBuilder.
- */
-GArrowStringArrayBuilder *
-garrow_string_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::utf8(),
-                                          NULL,
-                                          "[string-array-builder][new]");
-  return GARROW_STRING_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_string_array_builder_append:
- * @builder: A #GArrowStringArrayBuilder.
- * @value: A string value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Deprecated: 0.12.0:
- *   Use garrow_string_array_builder_append_value() instead.
- */
-gboolean
-garrow_string_array_builder_append(GArrowStringArrayBuilder *builder,
-                                   const gchar *value,
-                                   GError **error)
-{
-  return garrow_string_array_builder_append_string(builder, value, error);
-}
-
-/**
- * garrow_string_array_builder_append_value: (skip)
- * @builder: A #GArrowStringArrayBuilder.
- * @value: A string value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- *
- * Deprecated: 1.0.0:
- *   Use garrow_string_array_builder_append_string() instead.
- */
-gboolean
-garrow_string_array_builder_append_value(GArrowStringArrayBuilder *builder,
-                                         const gchar *value,
-                                         GError **error)
-{
-  return garrow_string_array_builder_append_string(builder, value, error);
-}
-
-/**
- * garrow_string_array_builder_append_string:
- * @builder: A #GArrowStringArrayBuilder.
- * @value: A string value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_string_array_builder_append_string(GArrowStringArrayBuilder *builder,
-                                          const gchar *value,
-                                          GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::StringBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append(value,
-                                      static_cast<gint32>(strlen(value)));
-  return garrow_error_check(error,
-                            status,
-                            "[string-array-builder][append-string]");
-}
-
-/**
- * garrow_string_array_builder_append_values: (skip)
- * @builder: A #GArrowStringArrayBuilder.
- * @values: (array length=values_length): The array of strings.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.10.0
- *
- * Deprecated: 1.0.0:
- *   Use garrow_string_array_builder_append_strings() instead.
- */
-gboolean
-garrow_string_array_builder_append_values(GArrowStringArrayBuilder *builder,
-                                          const gchar **values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_string_array_builder_append_strings(builder,
-                                                    values,
-                                                    values_length,
-                                                    is_valids,
-                                                    is_valids_length,
-                                                    error);
-}
-
-/**
- * garrow_string_array_builder_append_strings:
- * @builder: A #GArrowStringArrayBuilder.
- * @values: (array length=values_length): The array of strings.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_string_array_builder_append_strings(GArrowStringArrayBuilder *builder,
-                                           const gchar **values,
-                                           gint64 values_length,
-                                           const gboolean *is_valids,
-                                           gint64 is_valids_length,
-                                           GError **error)
-{
-  return garrow_array_builder_append_values<arrow::StringBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[string-array-builder][append-strings]");
-}
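
(A minimal plain-C sketch of garrow_string_array_builder_append_strings()
with a validity mask; the sample strings are illustrative and
garrow_array_builder_finish() is assumed from the generic builder API:)

    #include <arrow-glib/arrow-glib.h>

    static GArrowArray *
    build_string_example(GError **error)
    {
      GArrowStringArrayBuilder *builder = garrow_string_array_builder_new();

      /* The FALSE slot is appended as null; "ignored" is never stored. */
      const gchar *values[] = {"hello", "ignored", "world"};
      const gboolean is_valids[] = {TRUE, FALSE, TRUE};
      GArrowArray *array = NULL;
      if (garrow_string_array_builder_append_strings(builder,
                                                     values, 3,
                                                     is_valids, 3,
                                                     error)) {
        array = garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder),
                                            error);
      }
      g_object_unref(builder);
      return array;
    }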
-
-
-G_DEFINE_TYPE(GArrowLargeStringArrayBuilder,
-              garrow_large_string_array_builder,
-              GARROW_TYPE_LARGE_BINARY_ARRAY_BUILDER)
-
-static void
-garrow_large_string_array_builder_init(GArrowLargeStringArrayBuilder *builder)
-{
-}
-
-static void
-garrow_large_string_array_builder_class_init(GArrowLargeStringArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_large_string_array_builder_new:
- *
- * Returns: A newly created #GArrowLargeStringArrayBuilder.
- *
- * Since: 0.16.0
- */
-GArrowLargeStringArrayBuilder *
-garrow_large_string_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::large_utf8(),
-                                          NULL,
-                                          "[large-string-array-builder][new]");
-  return GARROW_LARGE_STRING_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_large_string_array_builder_append_string:
- * @builder: A #GArrowLargeStringArrayBuilder.
- * @value: A string value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_large_string_array_builder_append_string(GArrowLargeStringArrayBuilder *builder,
-                                                const gchar *value,
-                                                GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::LargeStringBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto status = arrow_builder->Append(value);
-  return garrow_error_check(error,
-                            status,
-                            "[large-string-array-builder][append-string]");
-}
-
-/**
- * garrow_large_string_array_builder_append_strings:
- * @builder: A #GArrowLargeStringArrayBuilder.
- * @values: (array length=values_length): The array of strings.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_large_string_array_builder_append_strings(GArrowLargeStringArrayBuilder *builder,
-                                                 const gchar **values,
-                                                 gint64 values_length,
-                                                 const gboolean *is_valids,
-                                                 gint64 is_valids_length,
-                                                 GError **error)
-{
-  return garrow_array_builder_append_values<arrow::LargeStringBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[large-string-array-builder][append-strings]");
-}
-
-
-G_DEFINE_TYPE(GArrowFixedSizeBinaryArrayBuilder,
-              garrow_fixed_size_binary_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_fixed_size_binary_array_builder_init(
-  GArrowFixedSizeBinaryArrayBuilder *builder)
-{
-}
-
-static void
-garrow_fixed_size_binary_array_builder_class_init(
-  GArrowFixedSizeBinaryArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_fixed_size_binary_array_builder_new:
- * @data_type: A #GArrowFixedSizeBinaryDataType for the created array.
- *
- * Returns: A newly created #GArrowFixedSizeBinaryArrayBuilder.
- */
-GArrowFixedSizeBinaryArrayBuilder *
-garrow_fixed_size_binary_array_builder_new(
-  GArrowFixedSizeBinaryDataType *data_type)
-{
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder =
-    garrow_array_builder_new(arrow_data_type,
-                             NULL,
-                             "[fixed-size-binary-array-builder][new]");
-  return GARROW_FIXED_SIZE_BINARY_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_fixed_size_binary_array_builder_append_value:
- * @builder: A #GArrowFixedSizeBinaryArrayBuilder.
- * @value: (nullable) (array length=length): A binary value.
- * @length: A value length.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_fixed_size_binary_array_builder_append_value(
-  GArrowFixedSizeBinaryArrayBuilder *builder,
-  const guint8 *value,
-  gint32 length,
-  GError **error)
-{
-  const gchar *context = "[fixed-size-binary-array-builder][append-value]";
-  auto arrow_builder =
-    static_cast<arrow::FixedSizeBinaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  arrow::Status status;
-  if (value) {
-    if (arrow_builder->byte_width() != length) {
-      g_set_error(error,
-                  GARROW_ERROR,
-                  GARROW_ERROR_INVALID,
-                  "%s: value size must be <%d>: <%d>",
-                  context,
-                  arrow_builder->byte_width(),
-                  length);
-      return FALSE;
-    }
-    status = arrow_builder->Append(value);
-  } else {
-    status = arrow_builder->AppendNull();
-  }
-  return garrow_error_check(error, status, context);
-}
-
-/**
- * garrow_fixed_size_binary_array_builder_append_value_bytes:
- * @builder: A #GArrowFixedSizeBinaryArrayBuilder.
- * @value: A binary value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_fixed_size_binary_array_builder_append_value_bytes(
-  GArrowFixedSizeBinaryArrayBuilder *builder,
-  GBytes *value,
-  GError **error)
-{
-  const gchar *context = "[fixed-size-binary-array-builder][append-value-bytes]";
-  auto arrow_builder =
-    static_cast<arrow::FixedSizeBinaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  gsize size;
-  auto data = g_bytes_get_data(value, &size);
-  if (arrow_builder->byte_width() != static_cast<gint32>(size)) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "%s: value size must be <%d>: <%" G_GSIZE_FORMAT ">",
-                context,
-                arrow_builder->byte_width(),
-                size);
-    return FALSE;
-  }
-  auto status = arrow_builder->Append(static_cast<const uint8_t *>(data));
-  return garrow_error_check(error, status, context);
-}
-
-/**
- * garrow_fixed_size_binary_array_builder_append_values:
- * @builder: A #GArrowFixedSizeBinaryArrayBuilder.
- * @values: (array length=values_length): The array of #GBytes.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_fixed_size_binary_array_builder_append_values(
-  GArrowFixedSizeBinaryArrayBuilder *builder,
-  GBytes **values,
-  gint64 values_length,
-  const gboolean *is_valids,
-  gint64 is_valids_length,
-  GError **error)
-{
-  return garrow_array_builder_append_values(
-    GARROW_ARRAY_BUILDER(builder),
-    values,
-    values_length,
-    is_valids,
-    is_valids_length,
-    error,
-    "[fixed-size-binary-array-builder][append-values]",
-    [](guint8 *output, GBytes *value, gsize size) {
-      size_t data_size;
-      auto raw_data = g_bytes_get_data(value, &data_size);
-      memcpy(output, raw_data, size);
-    });
-}
-
-/**
- * garrow_fixed_size_binary_array_builder_append_values_packed:
- * @builder: A #GArrowFixedSizeBinaryArrayBuilder.
- * @values: A #GBytes that contains multiple values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * This is more efficient than
- * garrow_fixed_size_binary_array_builder_append_values().
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_fixed_size_binary_array_builder_append_values_packed(
-  GArrowFixedSizeBinaryArrayBuilder *builder,
-  GBytes *values,
-  const gboolean *is_valids,
-  gint64 is_valids_length,
-  GError **error)
-{
-  return garrow_array_builder_append_values<arrow::FixedSizeBinaryBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     is_valids,
-     is_valids_length,
-     error,
-     "[fixed-size-binary-array-builder][append-values-packed]");
-}
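
(The packed variant above takes all values in one contiguous GBytes, one
byte-width-sized slot per element. A plain-C sketch; the 4-byte width and
sample data are illustrative, and garrow_fixed_size_binary_data_type_new()
plus garrow_array_builder_finish() are assumed from the rest of arrow-glib:)

    #include <arrow-glib/arrow-glib.h>

    static GArrowArray *
    build_fixed_size_binary_example(GError **error)
    {
      /* Every value must be exactly 4 bytes wide. */
      GArrowFixedSizeBinaryDataType *data_type =
        garrow_fixed_size_binary_data_type_new(4);
      GArrowFixedSizeBinaryArrayBuilder *builder =
        garrow_fixed_size_binary_array_builder_new(data_type);

      /* Two 4-byte values back to back; the 2nd slot is masked to null. */
      GBytes *packed = g_bytes_new_static("abcdWXYZ", 8);
      const gboolean is_valids[] = {TRUE, FALSE};
      GArrowArray *array = NULL;
      if (garrow_fixed_size_binary_array_builder_append_values_packed(
            builder, packed, is_valids, 2, error)) {
        array = garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder),
                                            error);
      }
      g_bytes_unref(packed);
      g_object_unref(builder);
      g_object_unref(data_type);
      return array;
    }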
-
-
-G_DEFINE_TYPE(GArrowDate32ArrayBuilder,
-              garrow_date32_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_date32_array_builder_init(GArrowDate32ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_date32_array_builder_class_init(GArrowDate32ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_date32_array_builder_new:
- *
- * Returns: A newly created #GArrowDate32ArrayBuilder.
- *
- * Since: 0.7.0
- */
-GArrowDate32ArrayBuilder *
-garrow_date32_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::date32(),
-                                          NULL,
-                                          "[date32-array-builder][new]");
-  return GARROW_DATE32_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_date32_array_builder_append:
- * @builder: A #GArrowDate32ArrayBuilder.
- * @value: The number of days since the UNIX epoch as a signed 32-bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_date32_array_builder_append_value() instead.
- */
-gboolean
-garrow_date32_array_builder_append(GArrowDate32ArrayBuilder *builder,
-                                   gint32 value,
-                                   GError **error)
-{
-  return garrow_date32_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_date32_array_builder_append_value:
- * @builder: A #GArrowDate32ArrayBuilder.
- * @value: The number of days since the UNIX epoch as a signed 32-bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_date32_array_builder_append_value(GArrowDate32ArrayBuilder *builder,
-                                         gint32 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Date32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[date32-array-builder][append-value]");
-}
-
-/**
- * garrow_date32_array_builder_append_values:
- * @builder: A #GArrowDate32ArrayBuilder.
- * @values: (array length=values_length): The array of
- *   the numbers of days since the UNIX epoch as signed 32-bit integers.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_date32_array_builder_append_values(GArrowDate32ArrayBuilder *builder,
-                                          const gint32 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Date32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[date32-array-builder][append-values]");
-}
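
(Since date32 values are plain day counts, computing "today" only needs
GLib; a minimal sketch in plain C, with the helper name illustrative:)

    #include <arrow-glib/arrow-glib.h>

    /* Append the current date; G_TIME_SPAN_DAY is one day in microseconds,
     * matching g_get_real_time()'s microseconds-since-epoch result. */
    static gboolean
    append_today(GArrowDate32ArrayBuilder *builder, GError **error)
    {
      gint32 today = (gint32)(g_get_real_time() / G_TIME_SPAN_DAY);
      return garrow_date32_array_builder_append_value(builder, today, error);
    }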
-
-/**
- * garrow_date32_array_builder_append_null: (skip)
- * @builder: A #GArrowDate32ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_date32_array_builder_append_null(GArrowDate32ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_date32_array_builder_append_nulls: (skip)
- * @builder: A #GArrowDate32ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_date32_array_builder_append_nulls(GArrowDate32ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowDate64ArrayBuilder,
-              garrow_date64_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_date64_array_builder_init(GArrowDate64ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_date64_array_builder_class_init(GArrowDate64ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_date64_array_builder_new:
- *
- * Returns: A newly created #GArrowDate64ArrayBuilder.
- *
- * Since: 0.7.0
- */
-GArrowDate64ArrayBuilder *
-garrow_date64_array_builder_new(void)
-{
-  auto builder = garrow_array_builder_new(arrow::date64(),
-                                          NULL,
-                                          "[date64-array-builder][new]");
-  return GARROW_DATE64_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_date64_array_builder_append:
- * @builder: A #GArrowDate64ArrayBuilder.
- * @value: The number of milliseconds since the UNIX epoch as a signed
- *   64-bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_date64_array_builder_append_value() instead.
- */
-gboolean
-garrow_date64_array_builder_append(GArrowDate64ArrayBuilder *builder,
-                                   gint64 value,
-                                   GError **error)
-{
-  return garrow_date64_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_date64_array_builder_append_value:
- * @builder: A #GArrowDate64ArrayBuilder.
- * @value: The number of milliseconds since the UNIX epoch as a signed
- *   64-bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_date64_array_builder_append_value(GArrowDate64ArrayBuilder *builder,
-                                         gint64 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Date64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[date64-array-builder][append-value]");
-}
-
-/**
- * garrow_date64_array_builder_append_values:
- * @builder: A #GArrowDate64ArrayBuilder.
- * @values: (array length=values_length): The array of
- *   the numbers of milliseconds since the UNIX epoch as signed 64-bit
- *   integers.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is a valid value. Otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_date64_array_builder_append_values(GArrowDate64ArrayBuilder *builder,
-                                          const gint64 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Date64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const int64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[date64-array-builder][append-values]");
-}
-
-/**
- * garrow_date64_array_builder_append_null: (skip)
- * @builder: A #GArrowDate64ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_date64_array_builder_append_null(GArrowDate64ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_date64_array_builder_append_nulls: (skip)
- * @builder: A #GArrowDate64ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_date64_array_builder_append_nulls(GArrowDate64ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowTimestampArrayBuilder,
-              garrow_timestamp_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_timestamp_array_builder_init(GArrowTimestampArrayBuilder *builder)
-{
-}
-
-static void
-garrow_timestamp_array_builder_class_init(GArrowTimestampArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_timestamp_array_builder_new:
- * @data_type: A #GArrowTimestampDataType.
- *
- * Returns: A newly created #GArrowTimestampArrayBuilder.
- *
- * Since: 0.7.0
- */
-GArrowTimestampArrayBuilder *
-garrow_timestamp_array_builder_new(GArrowTimestampDataType *data_type)
-{
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          NULL,
-                                          "[timestamp-array-builder][new]");
-  return GARROW_TIMESTAMP_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_timestamp_array_builder_append:
- * @builder: A #GArrowTimestampArrayBuilder.
- * @value: The timestamp since the UNIX epoch, in the unit of the
- *   builder's #GArrowTimestampDataType, as a signed 64-bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_timestamp_array_builder_append_value() instead.
- */
-gboolean
-garrow_timestamp_array_builder_append(GArrowTimestampArrayBuilder *builder,
-                                      gint64 value,
-                                      GError **error)
-{
-  return garrow_timestamp_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_timestamp_array_builder_append_value:
- * @builder: A #GArrowTimestampArrayBuilder.
- * @value: The timestamp since the UNIX epoch, in the unit of the
- *   builder's #GArrowTimestampDataType, as a signed 64-bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_timestamp_array_builder_append_value(GArrowTimestampArrayBuilder *builder,
-                                            gint64 value,
-                                            GError **error)
-{
-  return garrow_array_builder_append_value<arrow::TimestampBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[timestamp-array-builder][append-value]");
-}
-
-/**
- * garrow_timestamp_array_builder_append_values:
- * @builder: A #GArrowTimestampArrayBuilder.
- * @values: (array length=values_length): The array of
- *   timestamps since the UNIX epoch, in the unit of the builder's
- *   #GArrowTimestampDataType, as signed 64-bit integers.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   boolean that shows whether the Nth value is valid or not. If the
- *   Nth `is_valids` is %TRUE, the Nth `values` is valid value. Otherwise
- *   the Nth value is null value.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
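- *
- * For example, assuming `builder` was created with a millisecond
- * precision #GArrowTimestampDataType:
- *
- * |[<!-- language="C" -->
- * const gint64 values[] = {0, 1000, 2000};
- * const gboolean is_valids[] = {TRUE, FALSE, TRUE};
- * // Appends 1970-01-01T00:00:00.000, null and 1970-01-01T00:00:02.000.
- * garrow_timestamp_array_builder_append_values(builder,
- *                                              values, 3,
- *                                              is_valids, 3,
- *                                              NULL);
- * ]|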
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_timestamp_array_builder_append_values(GArrowTimestampArrayBuilder *builder,
-                                             const gint64 *values,
-                                             gint64 values_length,
-                                             const gboolean *is_valids,
-                                             gint64 is_valids_length,
-                                             GError **error)
-{
-  return garrow_array_builder_append_values<arrow::TimestampBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const int64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[timestamp-array-builder][append-values]");
-}
-
-/**
- * garrow_timestamp_array_builder_append_null: (skip)
- * @builder: A #GArrowTimestampArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_timestamp_array_builder_append_null(GArrowTimestampArrayBuilder *builder,
-                                           GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_timestamp_array_builder_append_nulls: (skip)
- * @builder: A #GArrowTimestampArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_timestamp_array_builder_append_nulls(GArrowTimestampArrayBuilder *builder,
-                                            gint64 n,
-                                            GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowTime32ArrayBuilder,
-              garrow_time32_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_time32_array_builder_init(GArrowTime32ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_time32_array_builder_class_init(GArrowTime32ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_time32_array_builder_new:
- * @data_type: A #GArrowTime32DataType.
- *
- * Returns: A newly created #GArrowTime32ArrayBuilder.
- *
- * Since: 0.7.0
- */
-GArrowTime32ArrayBuilder *
-garrow_time32_array_builder_new(GArrowTime32DataType *data_type)
-{
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          NULL,
-                                          "[time32-array-builder][new]");
-  return GARROW_TIME32_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_time32_array_builder_append:
- * @builder: A #GArrowTime32ArrayBuilder.
- * @value: The number of seconds or milliseconds since midnight
- *   (depending on the unit of the builder's #GArrowTime32DataType)
- *   in signed 32bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_time32_array_builder_append_value() instead.
- */
-gboolean
-garrow_time32_array_builder_append(GArrowTime32ArrayBuilder *builder,
-                                   gint32 value,
-                                   GError **error)
-{
-  return garrow_time32_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_time32_array_builder_append_value:
- * @builder: A #GArrowTime32ArrayBuilder.
- * @value: The number of seconds or milliseconds since midnight
- *   (depending on the unit of the builder's #GArrowTime32DataType)
- *   in signed 32bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_time32_array_builder_append_value(GArrowTime32ArrayBuilder *builder,
-                                         gint32 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Time32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[time32-array-builder][append-value]");
-}
-
-/**
- * garrow_time32_array_builder_append_values:
- * @builder: A #GArrowTime32ArrayBuilder.
- * @values: (array length=values_length): The array of
- *   the number of seconds or milliseconds since midnight (depending
- *   on the unit of the builder's #GArrowTime32DataType) in signed
- *   32bit integer.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): An array of
- *   booleans showing whether the Nth value is valid. If the Nth
- *   `is_valids` is %TRUE, the Nth `values` is valid; otherwise the
- *   Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_time32_array_builder_append_values(GArrowTime32ArrayBuilder *builder,
-                                          const gint32 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Time32Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     values,
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[time32-array-builder][append-values]");
-}
-
-/**
- * garrow_time32_array_builder_append_null: (skip)
- * @builder: A #GArrowTime32ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_time32_array_builder_append_null(GArrowTime32ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_time32_array_builder_append_nulls: (skip)
- * @builder: A #GArrowTime32ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_time32_array_builder_append_nulls(GArrowTime32ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowTime64ArrayBuilder,
-              garrow_time64_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_time64_array_builder_init(GArrowTime64ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_time64_array_builder_class_init(GArrowTime64ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_time64_array_builder_new:
- * @data_type: A #GArrowTime64DataType.
- *
- * Returns: A newly created #GArrowTime64ArrayBuilder.
- *
- * Since: 0.7.0
- */
-GArrowTime64ArrayBuilder *
-garrow_time64_array_builder_new(GArrowTime64DataType *data_type)
-{
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          NULL,
-                                          "[time64-array-builder][new]");
-  return GARROW_TIME64_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_time64_array_builder_append:
- * @builder: A #GArrowTime64ArrayBuilder.
- * @value: The number of microseconds or nanoseconds since midnight
- *   (depending on the unit of the builder's #GArrowTime64DataType)
- *   in signed 64bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_time64_array_builder_append_value() instead.
- */
-gboolean
-garrow_time64_array_builder_append(GArrowTime64ArrayBuilder *builder,
-                                   gint64 value,
-                                   GError **error)
-{
-  return garrow_time64_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_time64_array_builder_append_value:
- * @builder: A #GArrowTime64ArrayBuilder.
- * @value: The number of microseconds or nanoseconds since midnight
- *   (depending on the unit of the builder's #GArrowTime64DataType)
- *   in signed 64bit integer.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_time64_array_builder_append_value(GArrowTime64ArrayBuilder *builder,
-                                         gint64 value,
-                                         GError **error)
-{
-  return garrow_array_builder_append_value<arrow::Time64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     value,
-     error,
-     "[time64-array-builder][append-value]");
-}
-
-/**
- * garrow_time64_array_builder_append_values:
- * @builder: A #GArrowTime64ArrayBuilder.
- * @values: (array length=values_length): The array of
- *   the number of microseconds or nanoseconds since midnight
- *   (depending on the unit of the builder's #GArrowTime64DataType)
- *   in signed 64bit integer.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): An array of
- *   booleans showing whether the Nth value is valid. If the Nth
- *   `is_valids` is %TRUE, the Nth `values` is valid; otherwise the
- *   Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- */
-gboolean
-garrow_time64_array_builder_append_values(GArrowTime64ArrayBuilder *builder,
-                                          const gint64 *values,
-                                          gint64 values_length,
-                                          const gboolean *is_valids,
-                                          gint64 is_valids_length,
-                                          GError **error)
-{
-  return garrow_array_builder_append_values<arrow::Time64Builder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const int64_t *>(values),
-     values_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[time64-array-builder][append-values]");
-}
-
-/**
- * garrow_time64_array_builder_append_null: (skip)
- * @builder: A #GArrowTime64ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.7.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_time64_array_builder_append_null(GArrowTime64ArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_time64_array_builder_append_nulls: (skip)
- * @builder: A #GArrowTime64ArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.8.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_time64_array_builder_append_nulls(GArrowTime64ArrayBuilder *builder,
-                                         gint64 n,
-                                         GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-
-G_DEFINE_TYPE(GArrowBinaryDictionaryArrayBuilder,
-              garrow_binary_dictionary_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_binary_dictionary_array_builder_init(GArrowBinaryDictionaryArrayBuilder *builder)
-{
-}
-
-static void
-garrow_binary_dictionary_array_builder_class_init(GArrowBinaryDictionaryArrayBuilderClass *klass)
-{
-}
-
-
-/**
- * garrow_binary_dictionary_array_builder_new:
- *
- * Returns: A newly created #GArrowBinaryDictionaryArrayBuilder.
- *
- * Since: 2.0.0
- */
-GArrowBinaryDictionaryArrayBuilder *
-garrow_binary_dictionary_array_builder_new(void)
-{
-  // We can use arrow::int8() for the index type of the following arrow_dict_type
-  // because arrow::MakeBuilder creates a dictionary builder with arrow::AdaptiveIntBuilder.
-  auto arrow_dict_type = arrow::dictionary(arrow::int8(), arrow::binary());
-  auto builder = garrow_array_builder_new(arrow_dict_type,
-                                          nullptr,
-                                          "[binary-dictionary-array-builder][new]");
-  return GARROW_BINARY_DICTIONARY_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_binary_dictionary_array_builder_append_null: (skip)
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_binary_dictionary_array_builder_append_null(GArrowBinaryDictionaryArrayBuilder *builder,
-                                                   GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_binary_dictionary_array_builder_append_value:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @value: (array length=length): A binary value.
- * @length: The length of `value`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_binary_dictionary_array_builder_append_value(GArrowBinaryDictionaryArrayBuilder *builder,
-                                                    const guint8 *value,
-                                                    gint32 length,
-                                                    GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append(value, length);
-
-  return garrow_error_check(error,
-                            status,
-                            "[binary-dictionary-array-builder][append-value]");
-}
-
-/**
- * garrow_binary_dictionary_array_builder_append_value_bytes:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @value: A binary value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
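- * For example, a three byte value can be appended as a #GBytes:
- *
- * |[<!-- language="C" -->
- * const guint8 data[] = {0x01, 0x02, 0x03};
- * GBytes *value = g_bytes_new_static(data, sizeof(data));
- * garrow_binary_dictionary_array_builder_append_value_bytes(builder,
- *                                                           value,
- *                                                           NULL);
- * g_bytes_unref(value);
- * ]|
- *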
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_binary_dictionary_array_builder_append_value_bytes(GArrowBinaryDictionaryArrayBuilder *builder,
-                                                          GBytes *value,
-                                                          GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  gsize size;
-  auto data = g_bytes_get_data(value, &size);
-  auto status = arrow_builder->Append(static_cast<const uint8_t *>(data),
-                                      size);
-
-  return garrow_error_check(error,
-                            status,
-                            "[binary-dictionary-array-builder][append-value-bytes]");
-}
-
-/**
- * garrow_binary_dictionary_array_builder_append_array:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @array: A #GArrowBinaryArray.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_binary_dictionary_array_builder_append_array(GArrowBinaryDictionaryArrayBuilder *builder,
-                                                    GArrowBinaryArray *array,
-                                                    GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto arrow_array = garrow_array_get_raw<arrow::BinaryType>(GARROW_ARRAY(array));
-
-  auto status = arrow_builder->AppendArray(*arrow_array);
-
-  return garrow_error_check(error,
-                            status,
-                            "[binary-dictionary-array-builder][append-binary-array]");
-}
-
-/**
- * garrow_binary_dictionary_array_builder_append_indices:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @values: (array length=values_length): The array of indices.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): An array of
- *   %TRUE or %FALSE values showing whether the Nth index is valid. If
- *   the Nth `is_valids` is %TRUE, the Nth `values` is valid; otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append dictionary indices directly without modifying the internal memo.
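- *
- * For example, assuming the referenced dictionary entries were
- * already registered, e.g. with
- * garrow_binary_dictionary_array_builder_insert_memo_values():
- *
- * |[<!-- language="C" -->
- * // Refers to the 0th, 1st and 0th dictionary entries.
- * const gint64 indices[] = {0, 1, 0};
- * garrow_binary_dictionary_array_builder_append_indices(builder,
- *                                                       indices, 3,
- *                                                       NULL, 0,
- *                                                       NULL);
- * ]|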
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_binary_dictionary_array_builder_append_indices(GArrowBinaryDictionaryArrayBuilder *builder,
-                                                      const gint64 *values,
-                                                      gint64 values_length,
-                                                      const gboolean *is_valids,
-                                                      gint64 is_valids_length,
-                                                      GError **error)
-{
-  static const char *context = "[binary-dictionary-array-builder][append-indices]";
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto append_function = [&arrow_builder](
-      const gint64 *values,
-      gint64 values_length,
-      const uint8_t *valid_bytes) -> arrow::Status {
-    return arrow_builder->AppendIndices(values, values_length, valid_bytes);
-  };
-  return garrow_array_builder_append_values(values, values_length, is_valids,
-                                            is_valids_length, error, context,
-                                            append_function);
-}
-
-/**
- * garrow_binary_dictionary_array_builder_get_dictionary_length:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- *
- * Returns: The number of entries in the dictionary.
- *
- * Since: 2.0.0
- */
-gint64 garrow_binary_dictionary_array_builder_get_dictionary_length(GArrowBinaryDictionaryArrayBuilder *builder)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  return arrow_builder->dictionary_length();
-}
-
-/**
- * garrow_binary_dictionary_array_builder_finish_delta:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @out_indices: (out): The built #GArrowArray containing indices.
- * @out_delta: (out): The built #GArrowArray containing the dictionary delta.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
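- * Finishes the data appended so far as a pair of an indices array
- * and a dictionary array that contains only the dictionary entries
- * added since the previous finish (a dictionary delta).
- *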
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_binary_dictionary_array_builder_finish_delta(GArrowBinaryDictionaryArrayBuilder* builder,
-                                                    GArrowArray **out_indices,
-                                                    GArrowArray **out_delta,
-                                                    GError **error)
-{
-  static const char *context = "[binary-dictionary-array-builder][finish-delta]";
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  std::shared_ptr<arrow::Array> arrow_indices, arrow_delta;
-  auto status = arrow_builder->FinishDelta(&arrow_indices, &arrow_delta);
-  if (!garrow_error_check(error, status, context)) {
-    return FALSE;
-  }
-  *out_indices = garrow_array_new_raw(&arrow_indices);
-  *out_delta = garrow_array_new_raw(&arrow_delta);
-  return TRUE;
-}
-
-/**
- * garrow_binary_dictionary_array_builder_insert_memo_values:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- * @values: A #GArrowBinaryArray.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
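- * Inserts the given values into the dictionary's memo table without
- * appending anything to the array being built. This can be used to
- * register dictionary entries up front so that their indices are
- * known in advance.
- *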
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_binary_dictionary_array_builder_insert_memo_values(GArrowBinaryDictionaryArrayBuilder *builder,
-                                                          GArrowBinaryArray *values,
-                                                          GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto arrow_array = garrow_array_get_raw<arrow::BinaryType>(GARROW_ARRAY(values));
-
-  auto status = arrow_builder->InsertMemoValues(*arrow_array);
-
-  return garrow_error_check(error,
-                            status,
-                            "[binary-dictionary-array-builder][insert-memo-values]");
-}
-
-/**
- * garrow_binary_dictionary_array_builder_reset_full:
- * @builder: A #GArrowBinaryDictionaryArrayBuilder.
- *
- * Reset and also clear the accumulated dictionary values in the memo table.
- *
- * Since: 2.0.0
- */
-void
-garrow_binary_dictionary_array_builder_reset_full(GArrowBinaryDictionaryArrayBuilder *builder)
-{
-  auto arrow_builder =
-    static_cast<arrow::BinaryDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  arrow_builder->ResetFull();
-}
-
-
-G_DEFINE_TYPE(GArrowStringDictionaryArrayBuilder,
-              garrow_string_dictionary_array_builder,
-              GARROW_TYPE_ARRAY_BUILDER)
-
-static void
-garrow_string_dictionary_array_builder_init(GArrowStringDictionaryArrayBuilder *builder)
-{
-}
-
-static void
-garrow_string_dictionary_array_builder_class_init(GArrowStringDictionaryArrayBuilderClass *klass)
-{
-}
-
-
-/**
- * garrow_string_dictionary_array_builder_new:
- *
- * Returns: A newly created #GArrowStringDictionaryArrayBuilder.
- *
- * Since: 2.0.0
- */
-GArrowStringDictionaryArrayBuilder *
-garrow_string_dictionary_array_builder_new(void)
-{
-  // We can use arrow::int8() for the index type of the following arrow_dict_type
-  // because arrow::MakeBuilder creates a dictionary builder with arrow::AdaptiveIntBuilder.
-  auto arrow_dict_type = arrow::dictionary(arrow::int8(), arrow::utf8());
-  auto builder = garrow_array_builder_new(arrow_dict_type,
-                                          nullptr,
-                                          "[string-dictionary-array-builder][new]");
-  return GARROW_STRING_DICTIONARY_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_string_dictionary_array_builder_append_null: (skip)
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_string_dictionary_array_builder_append_null(GArrowStringDictionaryArrayBuilder *builder,
-                                                   GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_string_dictionary_array_builder_append_string:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- * @value: A string value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_string_dictionary_array_builder_append_string(GArrowStringDictionaryArrayBuilder *builder,
-                                                     const gchar *value,
-                                                     GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append(value,
-                                      static_cast<gint32>(strlen(value)));
-
-  return garrow_error_check(error,
-                            status,
-                            "[string-dictionary-array-builder][append-string]");
-}
-
-/**
- * garrow_string_dictionary_array_builder_append_array:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- * @array: A #GArrowStringArray.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_string_dictionary_array_builder_append_array(GArrowStringDictionaryArrayBuilder *builder,
-                                                    GArrowStringArray *array,
-                                                    GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto arrow_array = garrow_array_get_raw<arrow::StringType>(GARROW_ARRAY(array));
-
-  auto status = arrow_builder->AppendArray(*arrow_array);
-
-  return garrow_error_check(error,
-                            status,
-                            "[string-dictionary-array-builder][append-string-array]");
-}
-
-/**
- * garrow_string_dictionary_array_builder_append_indices:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- * @values: (array length=values_length): The array of indices.
- * @values_length: The length of `values`.
- * @is_valids: (nullable) (array length=is_valids_length): An array of
- *   %TRUE or %FALSE values showing whether the Nth index is valid. If
- *   the Nth `is_valids` is %TRUE, the Nth `values` is valid; otherwise
- *   the Nth value is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append dictionary indices directly without modifying the internal memo.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_string_dictionary_array_builder_append_indices(GArrowStringDictionaryArrayBuilder *builder,
-                                                      const gint64 *values,
-                                                      gint64 values_length,
-                                                      const gboolean *is_valids,
-                                                      gint64 is_valids_length,
-                                                      GError **error)
-{
-  static const char *context = "[string-dictionary-array-builder][append-indices]";
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto append_function = [&arrow_builder](
-      const gint64 *values,
-      gint64 values_length,
-      const uint8_t *valid_bytes) -> arrow::Status {
-    return arrow_builder->AppendIndices(values, values_length, valid_bytes);
-  };
-  return garrow_array_builder_append_values(values, values_length, is_valids,
-                                            is_valids_length, error, context,
-                                            append_function);
-}
-
-/**
- * garrow_string_dictionary_array_builder_get_dictionary_length:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- *
- * Returns: The number of entries in the dictionary.
- *
- * Since: 2.0.0
- */
-gint64 garrow_string_dictionary_array_builder_get_dictionary_length(GArrowStringDictionaryArrayBuilder *builder)
-{
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  return arrow_builder->dictionary_length();
-}
-
-/**
- * garrow_string_dictionary_array_builder_finish_delta:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- * @out_indices: (out): The built #GArrowArray containing indices.
- * @out_delta: (out): The built #GArrowArray containing the dictionary delta.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_string_dictionary_array_builder_finish_delta(GArrowStringDictionaryArrayBuilder* builder,
-                                                    GArrowArray **out_indices,
-                                                    GArrowArray **out_delta,
-                                                    GError **error)
-{
-  static const char *context = "[string-dictionary-array-builder][finish-delta]";
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  std::shared_ptr<arrow::Array> arrow_indices, arrow_delta;
-  auto status = arrow_builder->FinishDelta(&arrow_indices, &arrow_delta);
-  if (!garrow_error_check(error, status, context)) {
-    return FALSE;
-  }
-  *out_indices = garrow_array_new_raw(&arrow_indices);
-  *out_delta = garrow_array_new_raw(&arrow_delta);
-  return TRUE;
-}
-
-/**
- * garrow_string_dictionary_array_builder_insert_memo_values:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- * @values: A #GArrowStringArray.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 2.0.0
- */
-gboolean
-garrow_string_dictionary_array_builder_insert_memo_values(GArrowStringDictionaryArrayBuilder *builder,
-                                                          GArrowStringArray *values,
-                                                          GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  auto arrow_array = garrow_array_get_raw<arrow::StringType>(GARROW_ARRAY(values));
-
-  auto status = arrow_builder->InsertMemoValues(*arrow_array);
-
-  return garrow_error_check(error,
-                            status,
-                            "[string-dictionary-array-builder][insert-memo-values]");
-}
-
-/**
- * garrow_string_dictionary_array_builder_reset_full:
- * @builder: A #GArrowStringDictionaryArrayBuilder.
- *
- * Reset and also clear the accumulated dictionary values in the memo table.
- *
- * Since: 2.0.0
- */
-void
-garrow_string_dictionary_array_builder_reset_full(GArrowStringDictionaryArrayBuilder *builder)
-{
-  auto arrow_builder =
-    static_cast<arrow::StringDictionaryBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-  arrow_builder->ResetFull();
-}
-
-
-typedef struct GArrowListArrayBuilderPrivate_ {
-  GArrowArrayBuilder *value_builder;
-} GArrowListArrayBuilderPrivate;
-
-G_DEFINE_TYPE_WITH_PRIVATE(GArrowListArrayBuilder,
-                           garrow_list_array_builder,
-                           GARROW_TYPE_ARRAY_BUILDER)
-
-#define GARROW_LIST_ARRAY_BUILDER_GET_PRIVATE(obj)         \
-  static_cast<GArrowListArrayBuilderPrivate *>(            \
-     garrow_list_array_builder_get_instance_private(       \
-       GARROW_LIST_ARRAY_BUILDER(obj)))
-
-static void
-garrow_list_array_builder_dispose(GObject *object)
-{
-  auto priv = GARROW_LIST_ARRAY_BUILDER_GET_PRIVATE(object);
-
-  if (priv->value_builder) {
-    g_object_unref(priv->value_builder);
-    priv->value_builder = NULL;
-  }
-
-  G_OBJECT_CLASS(garrow_list_array_builder_parent_class)->dispose(object);
-}
-
-static void
-garrow_list_array_builder_init(GArrowListArrayBuilder *builder)
-{
-}
-
-static void
-garrow_list_array_builder_class_init(GArrowListArrayBuilderClass *klass)
-{
-  GObjectClass *gobject_class;
-
-  gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->dispose = garrow_list_array_builder_dispose;
-}
-
-/**
- * garrow_list_array_builder_new:
- * @data_type: A #GArrowListDataType for value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: A newly created #GArrowListArrayBuilder.
- */
-GArrowListArrayBuilder *
-garrow_list_array_builder_new(GArrowListDataType *data_type,
-                              GError **error)
-{
-  if (!GARROW_IS_LIST_DATA_TYPE(data_type)) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "[list-array-builder][new] data type must be list data type");
-    return NULL;
-  }
-
-  auto arrow_data_type =
-    garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          error,
-                                          "[list-array-builder][new]");
-  return GARROW_LIST_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_list_array_builder_append:
- * @builder: A #GArrowListArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new list element. To append a new list element, you
- * need to call this function and then append the list element's
- * values to `value_builder`. `value_builder` is the
- * #GArrowArrayBuilder for the list's values. You can get
- * `value_builder` by garrow_list_array_builder_get_value_builder().
- *
- * |[<!-- language="C" -->
- * GArrowInt8DataType *int8_data_type = garrow_int8_data_type_new();
- * GArrowField *field =
- *   garrow_field_new("value", GARROW_DATA_TYPE(int8_data_type));
- * GArrowListDataType *list_data_type = garrow_list_data_type_new(field);
- * GArrowListArrayBuilder *builder =
- *   garrow_list_array_builder_new(list_data_type, NULL);
- * GArrowInt8ArrayBuilder *value_builder =
- *   GARROW_INT8_ARRAY_BUILDER(
- *     garrow_list_array_builder_get_value_builder(builder));
- *
- * // Start 0th list element: [1, 0, -1]
- * garrow_list_array_builder_append(builder, NULL);
- * garrow_int8_array_builder_append(value_builder, 1, NULL);
- * garrow_int8_array_builder_append(value_builder, 0, NULL);
- * garrow_int8_array_builder_append(value_builder, -1, NULL);
- *
- * // Start 1st list element: [-29, 29]
- * garrow_list_array_builder_append(builder, NULL);
- * garrow_int8_array_builder_append(value_builder, -29, NULL);
- * garrow_int8_array_builder_append(value_builder, 29, NULL);
- *
- * {
- *   // [[1, 0, -1], [-29, 29]]
- *   GArrowArray *array =
- *     garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder), NULL);
- *   // Now, builder is needless. value_builder is owned by builder,
- *   // so it must not be unreffed separately.
- *   g_object_unref(builder);
- *
- *   // Use array...
- *   g_object_unref(array);
- * }
- * ]|
- *
- * Deprecated: 0.12.0:
- *   Use garrow_list_array_builder_append_value() instead.
- */
-gboolean
-garrow_list_array_builder_append(GArrowListArrayBuilder *builder,
-                                 GError **error)
-{
-  return garrow_list_array_builder_append_value(builder, error);
-}
-
-/**
- * garrow_list_array_builder_append_value:
- * @builder: A #GArrowListArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new list element. To append a new list element, you
- * need to call this function and then append the list element's
- * values to `value_builder`. `value_builder` is the
- * #GArrowArrayBuilder for the list's values. You can get
- * `value_builder` by garrow_list_array_builder_get_value_builder().
- *
- * |[<!-- language="C" -->
- * GArrowInt8DataType *int8_data_type = garrow_int8_data_type_new();
- * GArrowField *field =
- *   garrow_field_new("value", GARROW_DATA_TYPE(int8_data_type));
- * GArrowListDataType *list_data_type = garrow_list_data_type_new(field);
- * GArrowListArrayBuilder *builder =
- *   garrow_list_array_builder_new(list_data_type, NULL);
- * GArrowInt8ArrayBuilder *value_builder =
- *   GARROW_INT8_ARRAY_BUILDER(
- *     garrow_list_array_builder_get_value_builder(builder));
- *
- * // Start 0th list element: [1, 0, -1]
- * garrow_list_array_builder_append_value(builder, NULL);
- * garrow_int8_array_builder_append_value(value_builder, 1, NULL);
- * garrow_int8_array_builder_append_value(value_builder, 0, NULL);
- * garrow_int8_array_builder_append_value(value_builder, -1, NULL);
- *
- * // Start 1st list element: [-29, 29]
- * garrow_list_array_builder_append_value(builder, NULL);
- * garrow_int8_array_builder_append_value(value_builder, -29, NULL);
- * garrow_int8_array_builder_append_value(value_builder, 29, NULL);
- *
- * {
- *   // [[1, 0, -1], [-29, 29]]
- *   GArrowArray *array =
- *     garrow_array_builder_finish(GARROW_ARRAY_BUILDER(builder), NULL);
- *   // Now, builder is needless. value_builder is owned by builder,
- *   // so it must not be unreffed separately.
- *   g_object_unref(builder);
- *
- *   // Use array...
- *   g_object_unref(array);
- * }
- * ]|
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_list_array_builder_append_value(GArrowListArrayBuilder *builder,
-                                       GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::ListBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append();
-  return garrow_error_check(error, status, "[list-array-builder][append-value]");
-}
-
-/**
- * garrow_list_array_builder_append_null: (skip)
- * @builder: A #GArrowListArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new NULL element.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_list_array_builder_append_null(GArrowListArrayBuilder *builder,
-                                      GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_list_array_builder_get_value_builder:
- * @builder: A #GArrowListArrayBuilder.
- *
- * Returns: (transfer none): The #GArrowArrayBuilder for values.
- */
-GArrowArrayBuilder *
-garrow_list_array_builder_get_value_builder(GArrowListArrayBuilder *builder)
-{
-  auto priv = GARROW_LIST_ARRAY_BUILDER_GET_PRIVATE(builder);
-  if (!priv->value_builder) {
-    auto arrow_builder =
-      static_cast<arrow::ListBuilder *>(
-        garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-    auto arrow_value_builder = arrow_builder->value_builder();
-    priv->value_builder = garrow_array_builder_new_raw(arrow_value_builder);
-    garrow_array_builder_release_ownership(priv->value_builder);
-  }
-  return priv->value_builder;
-}
-
-
-typedef struct GArrowLargeListArrayBuilderPrivate_ {
-  GArrowArrayBuilder *value_builder;
-} GArrowLargeListArrayBuilderPrivate;
-
-G_DEFINE_TYPE_WITH_PRIVATE(GArrowLargeListArrayBuilder,
-                           garrow_large_list_array_builder,
-                           GARROW_TYPE_ARRAY_BUILDER)
-
-#define GARROW_LARGE_LIST_ARRAY_BUILDER_GET_PRIVATE(obj)        \
-  static_cast<GArrowLargeListArrayBuilderPrivate *>(            \
-     garrow_large_list_array_builder_get_instance_private(      \
-       GARROW_LARGE_LIST_ARRAY_BUILDER(obj)))
-
-static void
-garrow_large_list_array_builder_dispose(GObject *object)
-{
-  auto priv = GARROW_LARGE_LIST_ARRAY_BUILDER_GET_PRIVATE(object);
-
-  if (priv->value_builder) {
-    g_object_unref(priv->value_builder);
-    priv->value_builder = NULL;
-  }
-
-  G_OBJECT_CLASS(garrow_large_list_array_builder_parent_class)->dispose(object);
-}
-
-static void
-garrow_large_list_array_builder_init(GArrowLargeListArrayBuilder *builder)
-{
-}
-
-static void
-garrow_large_list_array_builder_class_init(GArrowLargeListArrayBuilderClass *klass)
-{
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->dispose = garrow_large_list_array_builder_dispose;
-}
-
-/**
- * garrow_large_list_array_builder_new:
- * @data_type: A #GArrowLargeListDataType for value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: A newly created #GArrowLargeListArrayBuilder.
- *
- * Since: 0.16.0
- */
-GArrowLargeListArrayBuilder *
-garrow_large_list_array_builder_new(GArrowLargeListDataType *data_type,
-                                    GError **error)
-{
-  if (!GARROW_IS_LARGE_LIST_DATA_TYPE(data_type)) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "[large-list-array-builder][new] data type must be large list data type");
-    return NULL;
-  }
-
-  auto arrow_data_type =
-    garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          error,
-                                          "[large-list-array-builder][new]");
-  return GARROW_LARGE_LIST_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_large_list_array_builder_append_value:
- * @builder: A #GArrowLargeListArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new list element. To append a new list element, you
- * need to call this function and then append the list element's
- * values to `value_builder`. `value_builder` is the
- * #GArrowArrayBuilder for the list's values. You can get
- * `value_builder` by garrow_large_list_array_builder_get_value_builder().
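- *
- * For example, assuming `builder` was created from a
- * #GArrowLargeListDataType of int8 values:
- *
- * |[<!-- language="C" -->
- * GArrowInt8ArrayBuilder *value_builder =
- *   GARROW_INT8_ARRAY_BUILDER(
- *     garrow_large_list_array_builder_get_value_builder(builder));
- *
- * // Start a new list element: [1, 2]
- * garrow_large_list_array_builder_append_value(builder, NULL);
- * garrow_int8_array_builder_append_value(value_builder, 1, NULL);
- * garrow_int8_array_builder_append_value(value_builder, 2, NULL);
- * ]|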
- *
- * Since: 0.16.0
- */
-gboolean
-garrow_large_list_array_builder_append_value(GArrowLargeListArrayBuilder *builder,
-                                             GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::LargeListBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append();
-  return garrow_error_check(error, status, "[large-list-array-builder][append-value]");
-}
-
-/**
- * garrow_large_list_array_builder_append_null: (skip)
- * @builder: A #GArrowLargeListArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new NULL element.
- *
- * Since: 0.16.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_large_list_array_builder_append_null(GArrowLargeListArrayBuilder *builder,
-                                            GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_large_list_array_builder_get_value_builder:
- * @builder: A #GArrowLargeListArrayBuilder.
- *
- * Returns: (transfer none): The #GArrowArrayBuilder for values.
- *
- * Since: 0.16.0
- */
-GArrowArrayBuilder *
-garrow_large_list_array_builder_get_value_builder(GArrowLargeListArrayBuilder *builder)
-{
-  auto priv = GARROW_LARGE_LIST_ARRAY_BUILDER_GET_PRIVATE(builder);
-  if (!priv->value_builder) {
-    auto arrow_builder =
-      static_cast<arrow::LargeListBuilder *>(
-        garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-    auto arrow_value_builder = arrow_builder->value_builder();
-    priv->value_builder = garrow_array_builder_new_raw(arrow_value_builder);
-    garrow_array_builder_release_ownership(priv->value_builder);
-  }
-  return priv->value_builder;
-}
-
-
-typedef struct GArrowStructArrayBuilderPrivate_ {
-  GList *field_builders;
-} GArrowStructArrayBuilderPrivate;
-
-G_DEFINE_TYPE_WITH_PRIVATE(GArrowStructArrayBuilder,
-                           garrow_struct_array_builder,
-                           GARROW_TYPE_ARRAY_BUILDER)
-
-#define GARROW_STRUCT_ARRAY_BUILDER_GET_PRIVATE(obj)         \
-  static_cast<GArrowStructArrayBuilderPrivate *>(            \
-     garrow_struct_array_builder_get_instance_private(       \
-       GARROW_STRUCT_ARRAY_BUILDER(obj)))
-
-static void
-garrow_struct_array_builder_dispose(GObject *object)
-{
-  auto priv = GARROW_STRUCT_ARRAY_BUILDER_GET_PRIVATE(object);
-
-  for (auto node = priv->field_builders; node; node = g_list_next(node)) {
-    auto field_builder = static_cast<GArrowArrayBuilder *>(node->data);
-    GArrowArrayBuilderPrivate *field_builder_priv;
-
-    field_builder_priv = GARROW_ARRAY_BUILDER_GET_PRIVATE(field_builder);
-    field_builder_priv->array_builder = nullptr;
-    g_object_unref(field_builder);
-  }
-  g_list_free(priv->field_builders);
-  priv->field_builders = NULL;
-
-  G_OBJECT_CLASS(garrow_struct_array_builder_parent_class)->dispose(object);
-}
-
-static void
-garrow_struct_array_builder_init(GArrowStructArrayBuilder *builder)
-{
-}
-
-static void
-garrow_struct_array_builder_class_init(GArrowStructArrayBuilderClass *klass)
-{
-  GObjectClass *gobject_class;
-
-  gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->dispose = garrow_struct_array_builder_dispose;
-}
-
-/**
- * garrow_struct_array_builder_new:
- * @data_type: #GArrowStructDataType for the struct.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: A newly created #GArrowStructArrayBuilder.
- */
-GArrowStructArrayBuilder *
-garrow_struct_array_builder_new(GArrowStructDataType *data_type,
-                                GError **error)
-{
-  if (!GARROW_IS_STRUCT_DATA_TYPE(data_type)) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "[struct-array-builder][new] data type must be struct data type");
-    return NULL;
-  }
-
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          error,
-                                          "[struct-array-builder][new]");
-  return GARROW_STRUCT_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_struct_array_builder_append:
- * @builder: A #GArrowStructArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new struct element. To append a new struct element,
- * you need to call this function and then append the struct
- * element's field values to all `field_builder`s. Each
- * `field_builder` is the #GArrowArrayBuilder for one field of the
- * struct. You can get a `field_builder` by
- * garrow_struct_array_builder_get_field_builder() or
- * garrow_struct_array_builder_get_field_builders().
- *
- * |[<!-- language="C" -->
- * // TODO
- * ]|
- *
- * Deprecated: 0.12.0:
- *   Use garrow_struct_array_builder_append_value() instead.
- */
-gboolean
-garrow_struct_array_builder_append(GArrowStructArrayBuilder *builder,
-                                   GError **error)
-{
-  return garrow_struct_array_builder_append_value(builder, error);
-}
-
-/**
- * garrow_struct_array_builder_append_value:
- * @builder: A #GArrowStructArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new struct element. To append a new struct element,
- * you need to call this function and then append the struct
- * element's field values to all `field_builder`s. Each
- * `field_builder` is the #GArrowArrayBuilder for one field of the
- * struct. You can get a `field_builder` by
- * garrow_struct_array_builder_get_field_builder() or
- * garrow_struct_array_builder_get_field_builders().
- *
- * |[<!-- language="C" -->
- * // TODO
- * ]|
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_struct_array_builder_append_value(GArrowStructArrayBuilder *builder,
-                                         GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::StructBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append();
-  return garrow_error_check(error,
-                            status,
-                            "[struct-array-builder][append-value]");
-}
-
-/**
- * garrow_struct_array_builder_append_null: (skip)
- * @builder: A #GArrowStructArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * It appends a new NULL element.
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_struct_array_builder_append_null(GArrowStructArrayBuilder *builder,
-                                        GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_struct_array_builder_get_field_builder:
- * @builder: A #GArrowStructArrayBuilder.
- * @i: The index of the field in the struct.
- *
- * Returns: (transfer none): The #GArrowArrayBuilder for the i-th field.
- */
-GArrowArrayBuilder *
-garrow_struct_array_builder_get_field_builder(GArrowStructArrayBuilder *builder,
-                                              gint i)
-{
-  auto field_builders = garrow_struct_array_builder_get_field_builders(builder);
-  auto field_builder = g_list_nth_data(field_builders, i);
-  return static_cast<GArrowArrayBuilder *>(field_builder);
-}
-
-/**
- * garrow_struct_array_builder_get_field_builders:
- * @builder: A #GArrowStructArrayBuilder.
- *
- * Returns: (element-type GArrowArrayBuilder) (transfer none):
- *   The #GArrowArrayBuilder for all fields.
- */
-GList *
-garrow_struct_array_builder_get_field_builders(GArrowStructArrayBuilder *builder)
-{
-  auto priv = GARROW_STRUCT_ARRAY_BUILDER_GET_PRIVATE(builder);
-  if (!priv->field_builders) {
-    auto arrow_struct_builder =
-      static_cast<arrow::StructBuilder *>(
-        garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-    GList *field_builders = NULL;
-    for (int i = 0; i < arrow_struct_builder->num_fields(); ++i) {
-      auto arrow_field_builder = arrow_struct_builder->field_builder(i);
-      auto field_builder = garrow_array_builder_new_raw(arrow_field_builder);
-      field_builders = g_list_prepend(field_builders, field_builder);
-    }
-    priv->field_builders = g_list_reverse(field_builders);
-  }
-
-  return priv->field_builders;
-}
-
-
-typedef struct GArrowMapArrayBuilderPrivate_ {
-  GArrowArrayBuilder *key_builder;
-  GArrowArrayBuilder *item_builder;
-  GArrowArrayBuilder *value_builder;
-} GArrowMapArrayBuilderPrivate;
-
-G_DEFINE_TYPE_WITH_PRIVATE(GArrowMapArrayBuilder,
-                           garrow_map_array_builder,
-                           GARROW_TYPE_ARRAY_BUILDER)
-
-#define GARROW_MAP_ARRAY_BUILDER_GET_PRIVATE(object)      \
-  static_cast<GArrowMapArrayBuilderPrivate *>(            \
-     garrow_map_array_builder_get_instance_private(       \
-       GARROW_MAP_ARRAY_BUILDER(object)))
-
-static void
-garrow_map_array_builder_dispose(GObject *object)
-{
-  auto priv = GARROW_MAP_ARRAY_BUILDER_GET_PRIVATE(object);
-
-  if (priv->key_builder) {
-    g_object_unref(priv->key_builder);
-    priv->key_builder = NULL;
-  }
-
-  if (priv->item_builder) {
-    g_object_unref(priv->item_builder);
-    priv->item_builder = NULL;
-  }
-
-  if (priv->value_builder) {
-    g_object_unref(priv->value_builder);
-    priv->value_builder = NULL;
-  }
-
-  G_OBJECT_CLASS(garrow_map_array_builder_parent_class)->dispose(object);
-}
-
-static void
-garrow_map_array_builder_init(GArrowMapArrayBuilder *builder)
-{
-}
-
-static void
-garrow_map_array_builder_class_init(GArrowMapArrayBuilderClass *klass)
-{
-  auto gobject_class = G_OBJECT_CLASS(klass);
-
-  gobject_class->dispose = garrow_map_array_builder_dispose;
-}
-
-/**
- * garrow_map_array_builder_new:
- * @data_type: #GArrowMapDataType for the map.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: (nullable): A newly created #GArrowMapArrayBuilder on success,
- *   %NULL on error.
- *
- * Since: 0.17.0
- */
-GArrowMapArrayBuilder *
-garrow_map_array_builder_new(GArrowMapDataType *data_type,
-                             GError **error)
-{
-  if (!GARROW_IS_MAP_DATA_TYPE(data_type)) {
-    g_set_error(error,
-                GARROW_ERROR,
-                GARROW_ERROR_INVALID,
-                "[map-array-builder][new] data type must be map data type");
-    return NULL;
-  }
-
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          error,
-                                          "[map-array-builder][new]");
-  if (builder) {
-    return GARROW_MAP_ARRAY_BUILDER(builder);
-  } else {
-    return NULL;
-  }
-}
-
-/**
- * garrow_map_array_builder_append_value:
- * @builder: A #GArrowMapArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
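- * It appends a new map element. After calling this function, append
- * the element's keys and items with the builders returned by
- * garrow_map_array_builder_get_key_builder() and
- * garrow_map_array_builder_get_item_builder().
- *
- * For example, assuming `builder` was created from a
- * #GArrowMapDataType with string keys and int8 items, the following
- * appends the map element `{"a": 1}`:
- *
- * |[<!-- language="C" -->
- * garrow_map_array_builder_append_value(builder, NULL);
- * GArrowStringArrayBuilder *key_builder =
- *   GARROW_STRING_ARRAY_BUILDER(
- *     garrow_map_array_builder_get_key_builder(builder));
- * GArrowInt8ArrayBuilder *item_builder =
- *   GARROW_INT8_ARRAY_BUILDER(
- *     garrow_map_array_builder_get_item_builder(builder));
- * garrow_string_array_builder_append_string(key_builder, "a", NULL);
- * garrow_int8_array_builder_append_value(item_builder, 1, NULL);
- * ]|
- *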
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.17.0
- */
-gboolean
-garrow_map_array_builder_append_value(GArrowMapArrayBuilder *builder,
-                                      GError **error)
-{
-  auto arrow_builder =
-    static_cast<arrow::MapBuilder *>(
-      garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-
-  auto status = arrow_builder->Append();
-  return garrow::check(error,
-                       status,
-                       "[map-array-builder][append-value]");
-}
-
-/**
- * garrow_map_array_builder_append_values:
- * @builder: A #GArrowMapArrayBuilder.
- * @offsets: (array length=offsets_length): The array of map entry
- *   offsets in signed 32bit integer.
- * @offsets_length: The length of `offsets`.
- * @is_valids: (nullable) (array length=is_valids_length): An array of
- *   booleans showing whether the Nth map element is valid. If the Nth
- *   `is_valids` is %TRUE, the Nth `offsets` is valid; otherwise the
- *   Nth element is null.
- * @is_valids_length: The length of `is_valids`.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.17.0
- */
-gboolean
-garrow_map_array_builder_append_values(GArrowMapArrayBuilder *builder,
-                                       const gint32 *offsets,
-                                       gint64 offsets_length,
-                                       const gboolean *is_valids,
-                                       gint64 is_valids_length,
-                                       GError **error)
-{
-  return garrow_array_builder_append_values<arrow::MapBuilder *>
-    (GARROW_ARRAY_BUILDER(builder),
-     reinterpret_cast<const int32_t *>(offsets),
-     offsets_length,
-     is_valids,
-     is_valids_length,
-     error,
-     "[map-array-builder][append-values]");
-}
-
-/**
- * garrow_map_array_builder_append_null: (skip)
- * @builder: A #GArrowMapArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.17.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_map_array_builder_append_null(GArrowMapArrayBuilder *builder,
-                                     GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-/**
- * garrow_map_array_builder_append_nulls: (skip)
- * @builder: A #GArrowMapArrayBuilder.
- * @n: The number of null values to be appended.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple nulls at once. It's more efficient than multiple
- * `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.17.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_nulls() instead.
- */
-gboolean
-garrow_map_array_builder_append_nulls(GArrowMapArrayBuilder *builder,
-                                      gint64 n,
-                                      GError **error)
-{
-  return garrow_array_builder_append_nulls(GARROW_ARRAY_BUILDER(builder),
-                                           n,
-                                           error);
-}
-
-/**
- * garrow_map_array_builder_get_key_builder:
- * @builder: A #GArrowMapArrayBuilder.
- *
- * Returns: (transfer none): The #GArrowArrayBuilder for key values.
- *
- * Since: 0.17.0
- */
-GArrowArrayBuilder *
-garrow_map_array_builder_get_key_builder(GArrowMapArrayBuilder *builder)
-{
-  auto priv = GARROW_MAP_ARRAY_BUILDER_GET_PRIVATE(builder);
-  if (!priv->key_builder) {
-    auto arrow_builder =
-      static_cast<arrow::MapBuilder *>(
-        garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-    auto arrow_key_builder = arrow_builder->key_builder();
-    priv->key_builder = garrow_array_builder_new_raw(arrow_key_builder);
-    garrow_array_builder_release_ownership(priv->key_builder);
-  }
-  return priv->key_builder;
-}
-
-/**
- * garrow_map_array_builder_get_item_builder:
- * @builder: A #GArrowMapArrayBuilder.
- *
- * Returns: (transfer none): The #GArrowArrayBuilder for item values.
- *
- * Since: 0.17.0
- */
-GArrowArrayBuilder *
-garrow_map_array_builder_get_item_builder(GArrowMapArrayBuilder *builder)
-{
-  auto priv = GARROW_MAP_ARRAY_BUILDER_GET_PRIVATE(builder);
-  if (!priv->item_builder) {
-    auto arrow_builder =
-      static_cast<arrow::MapBuilder *>(
-        garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-    auto arrow_item_builder = arrow_builder->item_builder();
-    priv->item_builder = garrow_array_builder_new_raw(arrow_item_builder);
-    garrow_array_builder_release_ownership(priv->item_builder);
-  }
-  return priv->item_builder;
-}
-
-/**
- * garrow_map_array_builder_get_value_builder:
- * @builder: A #GArrowMapArrayBuilder.
- *
- * Returns: (transfer none): The #GArrowArrayBuilder to add map entries as struct values.
- *   This can be used instead of garrow_map_array_builder_get_key_builder() and
- *   garrow_map_array_builder_get_item_builder(). You can build map entries as a list of
- *   struct values with this builder.
- *
- * Since: 0.17.0
- */
-GArrowArrayBuilder *
-garrow_map_array_builder_get_value_builder(GArrowMapArrayBuilder *builder)
-{
-  auto priv = GARROW_MAP_ARRAY_BUILDER_GET_PRIVATE(builder);
-  if (!priv->value_builder) {
-    auto arrow_builder =
-      static_cast<arrow::MapBuilder *>(
-        garrow_array_builder_get_raw(GARROW_ARRAY_BUILDER(builder)));
-    auto arrow_value_builder = arrow_builder->value_builder();
-    priv->value_builder = garrow_array_builder_new_raw(arrow_value_builder);
-    garrow_array_builder_release_ownership(priv->value_builder);
-  }
-  return priv->value_builder;
-}
-
-
-G_DEFINE_TYPE(GArrowDecimal128ArrayBuilder,
-              garrow_decimal128_array_builder,
-              GARROW_TYPE_FIXED_SIZE_BINARY_ARRAY_BUILDER)
-
-static void
-garrow_decimal128_array_builder_init(GArrowDecimal128ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_decimal128_array_builder_class_init(GArrowDecimal128ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_decimal128_array_builder_new:
- * @data_type: #GArrowDecimal128DataType for the decimal.
- *
- * Returns: A newly created #GArrowDecimal128ArrayBuilder.
- *
- * Since: 0.10.0
- */
-GArrowDecimal128ArrayBuilder *
-garrow_decimal128_array_builder_new(GArrowDecimal128DataType *data_type)
-{
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          NULL,
-                                          "[decimal128-array-builder][new]");
-  return GARROW_DECIMAL128_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_decimal128_array_builder_append:
- * @builder: A #GArrowDecimal128ArrayBuilder.
- * @value: A decimal value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.10.0
- *
- * Deprecated: 0.12.0:
- *   Use garrow_decimal128_array_builder_append_value() instead.
- */
-gboolean
-garrow_decimal128_array_builder_append(GArrowDecimal128ArrayBuilder *builder,
-                                       GArrowDecimal128 *value,
-                                       GError **error)
-{
-  return garrow_decimal128_array_builder_append_value(builder, value, error);
-}
-
-/**
- * garrow_decimal128_array_builder_append_value:
- * @builder: A #GArrowDecimal128ArrayBuilder.
- * @value: (nullable): A decimal value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- */
-gboolean
-garrow_decimal128_array_builder_append_value(GArrowDecimal128ArrayBuilder *builder,
-                                             GArrowDecimal128 *value,
-                                             GError **error)
-{
-  if (value) {
-    auto arrow_decimal = garrow_decimal128_get_raw(value);
-    return garrow_array_builder_append_value<arrow::Decimal128Builder *>
-      (GARROW_ARRAY_BUILDER(builder),
-       *arrow_decimal,
-       error,
-       "[decimal128-array-builder][append-value]");
-  } else {
-    return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder),
-                                            error);
-  }
-}
-
-/**
- * garrow_decimal128_array_builder_append_values:
- * @builder: A #GArrowDecimal128ArrayBuilder.
- * @values: (array length=values_length): The array of #GArrowDecimal128.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is valid. Otherwise the
- *   Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_decimal128_array_builder_append_values(
-  GArrowDecimal128ArrayBuilder *builder,
-  GArrowDecimal128 **values,
-  gint64 values_length,
-  const gboolean *is_valids,
-  gint64 is_valids_length,
-  GError **error)
-{
-  return garrow_array_builder_append_values(
-    GARROW_ARRAY_BUILDER(builder),
-    values,
-    values_length,
-    is_valids,
-    is_valids_length,
-    error,
-    "[decimal128-array-builder][append-values]",
-    [](guint8 *output, GArrowDecimal128 *value, gsize size) {
-      auto arrow_decimal = garrow_decimal128_get_raw(value);
-      arrow_decimal->ToBytes(output);
-    });
-}
-
-/**
- * garrow_decimal128_array_builder_append_null: (skip)
- * @builder: A #GArrowDecimal128ArrayBuilder.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * It appends a new NULL element.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 0.12.0
- *
- * Deprecated: 3.0.0:
- *   Use garrow_array_builder_append_null() instead.
- */
-gboolean
-garrow_decimal128_array_builder_append_null(GArrowDecimal128ArrayBuilder *builder,
-                                            GError **error)
-{
-  return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder), error);
-}
-
-
-G_DEFINE_TYPE(GArrowDecimal256ArrayBuilder,
-              garrow_decimal256_array_builder,
-              GARROW_TYPE_FIXED_SIZE_BINARY_ARRAY_BUILDER)
-
-static void
-garrow_decimal256_array_builder_init(GArrowDecimal256ArrayBuilder *builder)
-{
-}
-
-static void
-garrow_decimal256_array_builder_class_init(GArrowDecimal256ArrayBuilderClass *klass)
-{
-}
-
-/**
- * garrow_decimal256_array_builder_new:
- * @data_type: #GArrowDecimal256DataType for the decimal.
- *
- * Returns: A newly created #GArrowDecimal256ArrayBuilder.
- *
- * Since: 3.0.0
- */
-GArrowDecimal256ArrayBuilder *
-garrow_decimal256_array_builder_new(GArrowDecimal256DataType *data_type)
-{
-  auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(data_type));
-  auto builder = garrow_array_builder_new(arrow_data_type,
-                                          NULL,
-                                          "[decimal256-array-builder][new]");
-  return GARROW_DECIMAL256_ARRAY_BUILDER(builder);
-}
-
-/**
- * garrow_decimal256_array_builder_append_value:
- * @builder: A #GArrowDecimal256ArrayBuilder.
- * @value: (nullable): A decimal value.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_decimal256_array_builder_append_value(GArrowDecimal256ArrayBuilder *builder,
-                                             GArrowDecimal256 *value,
-                                             GError **error)
-{
-  if (value) {
-    auto arrow_decimal = garrow_decimal256_get_raw(value);
-    return garrow_array_builder_append_value<arrow::Decimal256Builder *>
-      (GARROW_ARRAY_BUILDER(builder),
-       *arrow_decimal,
-       error,
-       "[decimal256-array-builder][append-value]");
-  } else {
-    return garrow_array_builder_append_null(GARROW_ARRAY_BUILDER(builder),
-                                            error);
-  }
-}
-
-/**
- * garrow_decimal256_array_builder_append_values:
- * @builder: A #GArrowDecimal256ArrayBuilder.
- * @values: (array length=values_length): The array of #GArrowDecimal256.
- * @values_length: The length of @values.
- * @is_valids: (nullable) (array length=is_valids_length): The array of
- *   booleans that shows whether the Nth value is valid or not. If the
- *   Nth @is_valids is %TRUE, the Nth @values is valid. Otherwise the
- *   Nth value is null.
- * @is_valids_length: The length of @is_valids.
- * @error: (nullable): Return location for a #GError or %NULL.
- *
- * Append multiple values at once. It's more efficient than multiple
- * `append` and `append_null` calls.
- *
- * Returns: %TRUE on success, %FALSE if there was an error.
- *
- * Since: 3.0.0
- */
-gboolean
-garrow_decimal256_array_builder_append_values(
-  GArrowDecimal256ArrayBuilder *builder,
-  GArrowDecimal256 **values,
-  gint64 values_length,
-  const gboolean *is_valids,
-  gint64 is_valids_length,
-  GError **error)
-{
-  return garrow_array_builder_append_values(
-    GARROW_ARRAY_BUILDER(builder),
-    values,
-    values_length,
-    is_valids,
-    is_valids_length,
-    error,
-    "[decimal256-array-builder][append-values]",
-    [](guint8 *output, GArrowDecimal256 *value, gsize size) {
-      auto arrow_decimal = garrow_decimal256_get_raw(value);
-      arrow_decimal->ToBytes(output);
-    });
-}
-
-
-G_END_DECLS
-
-GArrowArrayBuilder *
-garrow_array_builder_new_raw(arrow::ArrayBuilder *arrow_builder,
-                             GType type)
-{
-  if (type == G_TYPE_INVALID) {
-    switch (arrow_builder->type()->id()) {
-    case arrow::Type::type::NA:
-      type = GARROW_TYPE_NULL_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::BOOL:
-      type = GARROW_TYPE_BOOLEAN_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::UINT8:
-      type = GARROW_TYPE_UINT8_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::INT8:
-      type = GARROW_TYPE_INT8_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::UINT16:
-      type = GARROW_TYPE_UINT16_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::INT16:
-      type = GARROW_TYPE_INT16_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::UINT32:
-      type = GARROW_TYPE_UINT32_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::INT32:
-      type = GARROW_TYPE_INT32_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::UINT64:
-      type = GARROW_TYPE_UINT64_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::INT64:
-      type = GARROW_TYPE_INT64_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::FLOAT:
-      type = GARROW_TYPE_FLOAT_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::DOUBLE:
-      type = GARROW_TYPE_DOUBLE_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::BINARY:
-      type = GARROW_TYPE_BINARY_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::LARGE_BINARY:
-      type = GARROW_TYPE_LARGE_BINARY_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::STRING:
-      type = GARROW_TYPE_STRING_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::LARGE_STRING:
-      type = GARROW_TYPE_LARGE_STRING_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::FIXED_SIZE_BINARY:
-      type = GARROW_TYPE_FIXED_SIZE_BINARY_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::DATE32:
-      type = GARROW_TYPE_DATE32_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::DATE64:
-      type = GARROW_TYPE_DATE64_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::TIMESTAMP:
-      type = GARROW_TYPE_TIMESTAMP_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::TIME32:
-      type = GARROW_TYPE_TIME32_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::TIME64:
-      type = GARROW_TYPE_TIME64_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::LIST:
-      type = GARROW_TYPE_LIST_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::LARGE_LIST:
-      type = GARROW_TYPE_LARGE_LIST_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::STRUCT:
-      type = GARROW_TYPE_STRUCT_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::MAP:
-      type = GARROW_TYPE_MAP_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::DECIMAL128:
-      type = GARROW_TYPE_DECIMAL128_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::DECIMAL256:
-      type = GARROW_TYPE_DECIMAL256_ARRAY_BUILDER;
-      break;
-    case arrow::Type::type::DICTIONARY:
-      {
-        const auto& dict_type =
-          arrow::internal::checked_cast<arrow::DictionaryType&>(*arrow_builder->type());
-        switch (dict_type.value_type()->id()) {
-          case arrow::Type::type::BINARY:
-            type = GARROW_TYPE_BINARY_DICTIONARY_ARRAY_BUILDER;
-            break;
-          case arrow::Type::type::STRING:
-            type = GARROW_TYPE_STRING_DICTIONARY_ARRAY_BUILDER;
-            break;
-          default:
-            type = GARROW_TYPE_ARRAY_BUILDER;
-            break;
-        }
-      }
-      break;
-    default:
-      type = GARROW_TYPE_ARRAY_BUILDER;
-      break;
-    }
-  }
-
-  auto builder =
-    GARROW_ARRAY_BUILDER(g_object_new(type,
-                                      "array-builder", arrow_builder,
-                                      NULL));
-  return builder;
-}
-
-arrow::ArrayBuilder *
-garrow_array_builder_get_raw(GArrowArrayBuilder *builder)
-{
-  auto priv = GARROW_ARRAY_BUILDER_GET_PRIVATE(builder);
-  return priv->array_builder;
-}
diff --git a/c_glib/arrow-glib/array-builder.h b/c_glib/arrow-glib/array-builder.h
deleted file mode 100644
index 7ab7a4c..0000000
--- a/c_glib/arrow-glib/array-builder.h
+++ /dev/null
@@ -1,1387 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow-glib/array.h>
-#include <arrow-glib/decimal.h>
-
-G_BEGIN_DECLS
-
-#define GARROW_TYPE_ARRAY_BUILDER (garrow_array_builder_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowArrayBuilder,
-                         garrow_array_builder,
-                         GARROW,
-                         ARRAY_BUILDER,
-                         GObject)
-struct _GArrowArrayBuilderClass
-{
-  GObjectClass parent_class;
-};
-
-void garrow_array_builder_release_ownership(GArrowArrayBuilder *builder);
-
-GArrowDataType *
-garrow_array_builder_get_value_data_type(GArrowArrayBuilder *builder);
-GArrowType garrow_array_builder_get_value_type(GArrowArrayBuilder *builder);
-
-GArrowArray *garrow_array_builder_finish(GArrowArrayBuilder *builder,
-                                         GError **error);
-
-GARROW_AVAILABLE_IN_2_0
-void garrow_array_builder_reset(GArrowArrayBuilder *builder);
-
-GARROW_AVAILABLE_IN_2_0
-gint64 garrow_array_builder_get_capacity(GArrowArrayBuilder *builder);
-GARROW_AVAILABLE_IN_2_0
-gint64 garrow_array_builder_get_length(GArrowArrayBuilder *builder);
-GARROW_AVAILABLE_IN_2_0
-gint64 garrow_array_builder_get_n_nulls(GArrowArrayBuilder *builder);
-
-GARROW_AVAILABLE_IN_2_0
-gboolean garrow_array_builder_resize(GArrowArrayBuilder *builder,
-                                     gint64 capacity,
-                                     GError **error);
-GARROW_AVAILABLE_IN_2_0
-gboolean garrow_array_builder_reserve(GArrowArrayBuilder *builder,
-                                      gint64 additional_capacity,
-                                      GError **error);
-
-GARROW_AVAILABLE_IN_3_0
-gboolean garrow_array_builder_append_null(GArrowArrayBuilder *builder,
-                                          GError **error);
-GARROW_AVAILABLE_IN_3_0
-gboolean garrow_array_builder_append_nulls(GArrowArrayBuilder *builder,
-                                           gint64 n,
-                                           GError **error);
-GARROW_AVAILABLE_IN_3_0
-gboolean garrow_array_builder_append_empty_value(GArrowArrayBuilder *builder,
-                                                 GError **error);
-GARROW_AVAILABLE_IN_3_0
-gboolean garrow_array_builder_append_empty_values(GArrowArrayBuilder *builder,
-                                                  gint64 n,
-                                                  GError **error);
-
-#define GARROW_TYPE_NULL_ARRAY_BUILDER (garrow_null_array_builder_get_type())
-G_DECLARE_DERIVABLE_TYPE(GArrowNullArrayBuilder,
-                         garrow_null_array_builder,
-                         GARROW,
-                         NULL_ARRAY_BUILDER,
-                         GArrowArrayBuilder)
-struct _GArrowNullArrayBuilderClass
-{
-  GArrowArrayBuilderClass parent_class;
-};
-
-GARROW_AVAILABLE_IN_0_13
-GArrowNullArrayBuilder *garrow_null_array_builder_new(void);
-
-#ifndef GARROW_DISABLE_DEPRECATED
-GARROW_DEPRECATED_IN_3_0_FOR(garrow_array_builder_append_null)
-GARROW_AVAILABLE_IN_0_13
-gboolean garrow_null_array_builder_append_null(GArrowNullArrayBuilder *builder,
-                                               GError **error);
-GARROW_DEPRECATED_IN_3_0_FOR(garrow_array_builder_append_nulls)
-GARROW_AVAILABLE_IN_0_13
-gboolean garrow_null_array_builder_append_nulls(GArrowNullArrayBuilder *builder,
-                                                gint64 n,
-                                                GError **error);
-#endif
-
-
-#define GARROW_TYPE_BOOLEAN_ARRAY_BUILDER       \
-  (garrow_boolean_array_builder_get_type())
... 61036 lines suppressed ...

[arrow-rs] 05/14: Removed julia.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 31f7e93f4e67562222d66d764cb207f4d44b3840
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:20:16 2021 +0000

    Removed julia.
---
 julia/Arrow/.gitignore                             |     6 -
 julia/Arrow/LICENSE.md                             |    15 -
 julia/Arrow/Project.toml                           |    37 -
 julia/Arrow/README.md                              |    55 -
 julia/Arrow/docs/.gitignore                        |     2 -
 julia/Arrow/docs/Manifest.toml                     |   204 -
 julia/Arrow/docs/Project.toml                      |     3 -
 julia/Arrow/docs/make.jl                           |    24 -
 julia/Arrow/docs/src/index.md                      |    10 -
 julia/Arrow/docs/src/manual.md                     |   150 -
 julia/Arrow/docs/src/reference.md                  |     6 -
 julia/Arrow/src/Arrow.jl                           |   107 -
 julia/Arrow/src/FlatBuffers/FlatBuffers.jl         |   153 -
 julia/Arrow/src/FlatBuffers/builder.jl             |   440 -
 julia/Arrow/src/FlatBuffers/table.jl               |   170 -
 julia/Arrow/src/arraytypes/arraytypes.jl           |   190 -
 julia/Arrow/src/arraytypes/bool.jl                 |   111 -
 julia/Arrow/src/arraytypes/compressed.jl           |    90 -
 julia/Arrow/src/arraytypes/dictencoding.jl         |   248 -
 julia/Arrow/src/arraytypes/fixedsizelist.jl        |   153 -
 julia/Arrow/src/arraytypes/list.jl                 |   209 -
 julia/Arrow/src/arraytypes/map.jl                  |   115 -
 julia/Arrow/src/arraytypes/primitive.jl            |   106 -
 julia/Arrow/src/arraytypes/struct.jl               |   130 -
 julia/Arrow/src/arraytypes/unions.jl               |   279 -
 julia/Arrow/src/arrowtypes.jl                      |   166 -
 julia/Arrow/src/eltypes.jl                         |   415 -
 julia/Arrow/src/metadata/File.jl                   |    90 -
 julia/Arrow/src/metadata/Flatbuf.jl                |    25 -
 julia/Arrow/src/metadata/Message.jl                |   202 -
 julia/Arrow/src/metadata/Schema.jl                 |   610 -
 julia/Arrow/src/table.jl                           |   556 -
 julia/Arrow/src/utils.jl                           |   200 -
 julia/Arrow/src/write.jl                           |   456 -
 julia/Arrow/test/arrowjson.jl                      |   611 -
 julia/Arrow/test/arrowjson/datetime.json           |   911 -
 julia/Arrow/test/arrowjson/decimal.json            | 32948 -------------------
 julia/Arrow/test/arrowjson/dictionary.json         |   422 -
 .../Arrow/test/arrowjson/dictionary_unsigned.json  |   323 -
 julia/Arrow/test/arrowjson/map.json                |   291 -
 julia/Arrow/test/arrowjson/nested.json             |   537 -
 julia/Arrow/test/arrowjson/primitive-empty.json    |   879 -
 julia/Arrow/test/arrowjson/primitive.json          |  1890 --
 .../Arrow/test/arrowjson/primitive_no_batches.json |   287 -
 julia/Arrow/test/dates.jl                          |    61 -
 julia/Arrow/test/integrationtest.jl                |    49 -
 julia/Arrow/test/pyarrow_roundtrip.jl              |    74 -
 julia/Arrow/test/runtests.jl                       |   218 -
 julia/Arrow/test/testtables.jl                     |   252 -
 49 files changed, 45486 deletions(-)

diff --git a/julia/Arrow/.gitignore b/julia/Arrow/.gitignore
deleted file mode 100644
index 2a9b243..0000000
--- a/julia/Arrow/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-Manifest.toml
-*.jl.cov
-*.jl.*.cov
-*.jl.mem
-
-test/_scrap.jl
diff --git a/julia/Arrow/LICENSE.md b/julia/Arrow/LICENSE.md
deleted file mode 100644
index 136e1c6..0000000
--- a/julia/Arrow/LICENSE.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
\ No newline at end of file
diff --git a/julia/Arrow/Project.toml b/julia/Arrow/Project.toml
deleted file mode 100644
index 17a3be0..0000000
--- a/julia/Arrow/Project.toml
+++ /dev/null
@@ -1,37 +0,0 @@
-name = "Arrow"
-uuid = "69666777-d1a9-59fb-9406-91d4454c9d45"
-authors = ["quinnj <qu...@gmail.com>"]
-version = "1.1.0"
-
-[deps]
-BitIntegers = "c3b6d118-76ef-56ca-8cc7-ebb389d030a1"
-CodecLz4 = "5ba52731-8f18-5e0d-9241-30f10d1ec561"
-CodecZstd = "6b39b394-51ab-5f42-8807-6242bab2b4c2"
-DataAPI = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
-Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
-Mmap = "a63ad114-7e13-5084-954f-fe012c677804"
-PooledArrays = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
-SentinelArrays = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
-Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
-TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53"
-UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
-
-[compat]
-BitIntegers = "0.2"
-CodecLz4 = "0.4"
-CodecZstd = "0.7"
-DataAPI = "1"
-PooledArrays = "0.5"
-SentinelArrays = "1"
-Tables = "1.1"
-TimeZones = "1"
-julia = "1.3"
-
-[extras]
-JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1"
-Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
-StructTypes = "856f2bd8-1eba-4b0a-8007-ebc267875bd4"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
-[targets]
-test = ["Test", "Random", "JSON3", "StructTypes"]
diff --git a/julia/Arrow/README.md b/julia/Arrow/README.md
deleted file mode 100644
index 90d0477..0000000
--- a/julia/Arrow/README.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# Arrow
-
-[![docs](https://img.shields.io/badge/docs-latest-blue&logo=julia)](https://arrow.juliadata.org/dev/)
-[![CI](https://github.com/JuliaData/Arrow.jl/workflows/CI/badge.svg)](https://github.com/JuliaData/Arrow.jl/actions?query=workflow%3ACI)
-[![codecov](https://codecov.io/gh/JuliaData/Arrow.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/JuliaData/Arrow.jl)
-
-[![deps](https://juliahub.com/docs/Arrow/deps.svg)](https://juliahub.com/ui/Packages/Arrow/QnF3w?t=2)
-[![version](https://juliahub.com/docs/Arrow/version.svg)](https://juliahub.com/ui/Packages/Arrow/QnF3w)
-[![pkgeval](https://juliahub.com/docs/Arrow/pkgeval.svg)](https://juliahub.com/ui/Packages/Arrow/QnF3w)
-
-This is a pure Julia implementation of the [Apache Arrow](https://arrow.apache.org) data standard.  This package provides Julia `AbstractVector` objects for
-referencing data that conforms to the Arrow standard.  This allows users to seamlessly interface Arrow formatted data with a great deal of existing Julia code.
-
-Please see this [document](https://arrow.apache.org/docs/format/Columnar.html#physical-memory-layout) for a description of the Arrow memory layout.
-
-## Installation
-
-The package can be installed by typing in the following in a Julia REPL:
-
-```julia
-julia> using Pkg; Pkg.add(url="https://github.com/apache/arrow", subdir="julia/Arrow", rev="apache-arrow-3.0.0")
-```
-
-or, to use the non-official Apache code, which may include bugfix patches between Apache releases, you can do:
-
-```julia
-julia> using Pkg; Pkg.add("Arrow")
-```
-
-## Difference between this code and the JuliaData/Arrow.jl repository
-
-This code is officially part of the apache/arrow repository and as such follows the release cadence of the entire project, including standard community
-voting protocols. The JuliaData/Arrow.jl repository can be viewed as a "dev" or "latest" branch of this code that may release more frequently, but without following
-official Apache release guidelines. The two repositories are kept in sync, however, so any bugfix patches in JuliaData will be upstreamed to apache/arrow for each release.
-
-## Format Support
-
-This implementation supports the 1.0 version of the specification, including support for:
-  * All primitive data types
-  * All nested data types
-  * Dictionary encodings and messages
-  * Extension types
-  * Streaming, file, record batch, and replacement and isdelta dictionary messages
-
-It currently doesn't include support for:
-  * Tensors or sparse tensors
-  * Flight RPC
-  * C data interface
-
-Third-party data formats:
-  * csv and parquet support via the existing CSV.jl and Parquet.jl packages
-  * Other Tables.jl-compatible packages automatically supported (DataFrames.jl, JSONTables.jl, JuliaDB.jl, SQLite.jl, MySQL.jl, JDBC.jl, ODBC.jl, XLSX.jl, etc.)
-  * No current Julia packages support ORC or Avro data formats
-
-See the [full documentation](https://arrow.juliadata.org/dev/) for details on reading and writing arrow data.
diff --git a/julia/Arrow/docs/.gitignore b/julia/Arrow/docs/.gitignore
deleted file mode 100644
index a303fff..0000000
--- a/julia/Arrow/docs/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-build/
-site/
diff --git a/julia/Arrow/docs/Manifest.toml b/julia/Arrow/docs/Manifest.toml
deleted file mode 100644
index 69420b1..0000000
--- a/julia/Arrow/docs/Manifest.toml
+++ /dev/null
@@ -1,204 +0,0 @@
-# This file is machine-generated - editing it directly is not advised
-
-[[ArgTools]]
-uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f"
-
-[[Arrow]]
-deps = ["CodecLz4", "CodecZstd", "DataAPI", "Dates", "Mmap", "PooledArrays", "SentinelArrays", "Tables"]
-git-tree-sha1 = "76641f71ac332cd4d3cf54b98234a0f597bd7a2f"
-uuid = "69666777-d1a9-59fb-9406-91d4454c9d45"
-version = "0.3.0"
-
-[[Artifacts]]
-uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
-
-[[Base64]]
-uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
-
-[[CodecLz4]]
-deps = ["Lz4_jll", "TranscodingStreams"]
-git-tree-sha1 = "59fe0cb37784288d6b9f1baebddbf75457395d40"
-uuid = "5ba52731-8f18-5e0d-9241-30f10d1ec561"
-version = "0.4.0"
-
-[[CodecZstd]]
-deps = ["TranscodingStreams", "Zstd_jll"]
-git-tree-sha1 = "d19cd9ae79ef31774151637492291d75194fc5fa"
-uuid = "6b39b394-51ab-5f42-8807-6242bab2b4c2"
-version = "0.7.0"
-
-[[DataAPI]]
-git-tree-sha1 = "176e23402d80e7743fc26c19c681bfb11246af32"
-uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
-version = "1.3.0"
-
-[[DataValueInterfaces]]
-git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6"
-uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464"
-version = "1.0.0"
-
-[[Dates]]
-deps = ["Printf"]
-uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
-
-[[DocStringExtensions]]
-deps = ["LibGit2", "Markdown", "Pkg", "Test"]
-git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1"
-uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
-version = "0.8.3"
-
-[[Documenter]]
-deps = ["Base64", "Dates", "DocStringExtensions", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"]
-git-tree-sha1 = "fb1ff838470573adc15c71ba79f8d31328f035da"
-uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
-version = "0.25.2"
-
-[[DocumenterMarkdown]]
-deps = ["Documenter", "Test"]
-git-tree-sha1 = "c302ba512683c3db462ee4eff718ae6fedcbf380"
-uuid = "997ab1e6-3595-5248-9280-8efb232c3433"
-version = "0.2.0"
-
-[[Downloads]]
-deps = ["ArgTools", "LibCURL"]
-uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
-
-[[InteractiveUtils]]
-deps = ["Markdown"]
-uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
-
-[[IteratorInterfaceExtensions]]
-git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856"
-uuid = "82899510-4779-5014-852e-03e436cf321d"
-version = "1.0.0"
-
-[[JLLWrappers]]
-git-tree-sha1 = "c70593677bbf2c3ccab4f7500d0f4dacfff7b75c"
-uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
-version = "1.1.3"
-
-[[JSON]]
-deps = ["Dates", "Mmap", "Parsers", "Unicode"]
-git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4"
-uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
-version = "0.21.1"
-
-[[LibCURL]]
-deps = ["LibCURL_jll", "MozillaCACerts_jll"]
-uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
-
-[[LibCURL_jll]]
-deps = ["Libdl"]
-uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0"
-
-[[LibGit2]]
-deps = ["Printf"]
-uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
-
-[[Libdl]]
-uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
-
-[[LinearAlgebra]]
-deps = ["Libdl"]
-uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
-
-[[Logging]]
-uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
-
-[[Lz4_jll]]
-deps = ["Libdl", "Pkg"]
-git-tree-sha1 = "51b1db0732bbdcfabb60e36095cc3ed9c0016932"
-uuid = "5ced341a-0733-55b8-9ab6-a4889d929147"
-version = "1.9.2+2"
-
-[[Markdown]]
-deps = ["Base64"]
-uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
-
-[[Mmap]]
-uuid = "a63ad114-7e13-5084-954f-fe012c677804"
-
-[[MozillaCACerts_jll]]
-uuid = "14a3606d-f60d-562e-9121-12d972cd8159"
-
-[[Parsers]]
-deps = ["Dates"]
-git-tree-sha1 = "6fa4202675c05ba0f8268a6ddf07606350eda3ce"
-uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
-version = "1.0.11"
-
-[[Pkg]]
-deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "UUIDs"]
-uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
-
-[[PooledArrays]]
-deps = ["DataAPI"]
-git-tree-sha1 = "b1333d4eced1826e15adbdf01a4ecaccca9d353c"
-uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
-version = "0.5.3"
-
-[[Printf]]
-deps = ["Unicode"]
-uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
-
-[[REPL]]
-deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"]
-uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
-
-[[Random]]
-deps = ["Serialization"]
-uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
-
-[[SHA]]
-uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
-
-[[SentinelArrays]]
-deps = ["Dates", "Random"]
-git-tree-sha1 = "6ccde405cf0759eba835eb613130723cb8f10ff9"
-uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
-version = "1.2.16"
-
-[[Serialization]]
-uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
-
-[[Sockets]]
-uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
-
-[[TOML]]
-deps = ["Dates"]
-uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
-
-[[TableTraits]]
-deps = ["IteratorInterfaceExtensions"]
-git-tree-sha1 = "b1ad568ba658d8cbb3b892ed5380a6f3e781a81e"
-uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c"
-version = "1.0.0"
-
-[[Tables]]
-deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "TableTraits", "Test"]
-git-tree-sha1 = "24a584cf65e2cfabdadc21694fb69d2e74c82b44"
-uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
-version = "1.1.0"
-
-[[Test]]
-deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
-uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
-[[TranscodingStreams]]
-deps = ["Random", "Test"]
-git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
-uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
-version = "0.9.5"
-
-[[UUIDs]]
-deps = ["Random", "SHA"]
-uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
-
-[[Unicode]]
-uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
-
-[[Zstd_jll]]
-deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
-git-tree-sha1 = "6f1abcb0c44f184690912aa4b0ba861dd64f11b9"
-uuid = "3161d3a3-bdf6-5164-811a-617609db77b4"
-version = "1.4.5+2"
diff --git a/julia/Arrow/docs/Project.toml b/julia/Arrow/docs/Project.toml
deleted file mode 100644
index 623cab2..0000000
--- a/julia/Arrow/docs/Project.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-[deps]
-Arrow = "69666777-d1a9-59fb-9406-91d4454c9d45"
-Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
diff --git a/julia/Arrow/docs/make.jl b/julia/Arrow/docs/make.jl
deleted file mode 100644
index 8ee7425..0000000
--- a/julia/Arrow/docs/make.jl
+++ /dev/null
@@ -1,24 +0,0 @@
-using Documenter
-using Arrow
-
-
-makedocs(;
-    modules=[Arrow],
-    repo="https://github.com/JuliaData/Arrow.jl/blob/{commit}{path}#L{line}",
-    sitename="Arrow.jl",
-    format=Documenter.HTML(;
-        prettyurls=get(ENV, "CI", "false") == "true",
-        canonical="https://JuliaData.github.io/Arrow.jl",
-        assets=String[],
-    ),
-    pages = [
-        "Home" => "index.md",
-        "User Manual" => "manual.md",
-        "API Reference" => "reference.md"
-    ]
-)
-
-deploydocs(;
-    repo="github.com/JuliaData/Arrow.jl",
-    devbranch = "main"
-)
diff --git a/julia/Arrow/docs/src/index.md b/julia/Arrow/docs/src/index.md
deleted file mode 100644
index ee4d141..0000000
--- a/julia/Arrow/docs/src/index.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Arrow.jl
-
-```@contents
-Pages = ["manual.md", "reference.md"]
-Depth = 3
-```
-
-```@docs
-Arrow
-```
\ No newline at end of file
diff --git a/julia/Arrow/docs/src/manual.md b/julia/Arrow/docs/src/manual.md
deleted file mode 100644
index 8f49f14..0000000
--- a/julia/Arrow/docs/src/manual.md
+++ /dev/null
@@ -1,150 +0,0 @@
-# User Manual
-
-The goal of this documentation is to provide a brief introduction to the arrow data format, then provide a walk-through of the functionality provided in the Arrow.jl Julia package, with an aim to expose a little of the machinery "under the hood" to help explain how things work and how that influences real-world use-cases for the arrow data format.
-
-The best place to learn about the Apache arrow project is [the website itself](https://arrow.apache.org/), specifically the data format [specification](https://arrow.apache.org/docs/format/Columnar.html). Put briefly, the arrow project provides a formal specification for how columnar ("table") data can be laid out efficiently in memory to standardize and maximize the ability to share data across languages/platforms. In the current [apache/arrow GitHub repository](https://github.com/apache/ [...]
-
-The [Arrow.jl](https://github.com/JuliaData/Arrow.jl) Julia package is another implementation, allowing the ability to both read and write data in the arrow format. As a data format, arrow specifies an exact memory layout to be used for columnar table data, and as such, "reading" involves custom Julia objects ([`Arrow.Table`](@ref) and [`Arrow.Stream`](@ref)), which read the *metadata* of an "arrow memory blob", then *wrap* the array data contained therein, having learned the type and si [...]
-
-
-## Reading arrow data
-
-After installing the Arrow.jl Julia package (via `] add Arrow`), and if you have some arrow data, let's say a file named `data.arrow` generated from the [`pyarrow`](https://arrow.apache.org/docs/python/) library (a Python library for interfacing with arrow data), you can then read that arrow data into a Julia session by doing:
-
-```julia
-using Arrow
-
-table = Arrow.Table("data.arrow")
-```
-
-### `Arrow.Table`
-
-The type of `table` in this example will be an `Arrow.Table`. When "reading" the arrow data, `Arrow.Table` first ["mmaps"](https://en.wikipedia.org/wiki/Mmap) the `data.arrow` file, which is an important technique for dealing with data larger than available RAM on a system. By "mmapping" a file, the OS doesn't actually load the entire file contents into RAM at the same time, but file contents are "swapped" into RAM as different regions of a file are requested. Once "mmapped", `Arrow.Ta [...]
-
-* [`Arrow.Primitive`](@ref): the most common array type for simple, fixed-size elements like integers, floats, time types, and decimals
-* [`Arrow.List`](@ref): an array type where its own elements are also arrays of some kind, like string columns, where each element can be thought of as an array of characters
-* [`Arrow.FixedSizeList`](@ref): similar to the `List` type, but where each array element has a fixed number of elements itself; you can think of this like a `Vector{NTuple{N, T}}`, where `N` is the fixed-size width
-* [`Arrow.Map`](@ref): an array type where each element is like a Julia `Dict`; a list of key value pairs like a `Vector{Dict}`
-* [`Arrow.Struct`](@ref): an array type where each element is an instance of a custom struct, i.e. an ordered collection of named & typed fields, kind of like a `Vector{NamedTuple}`
-* [`Arrow.DenseUnion`](@ref): an array type where elements may be of several different types, stored compactly; can be thought of like `Vector{Union{A, B}}`
-* [`Arrow.SparseUnion`](@ref): another array type where elements may be of several different types, but stored as if made up of equal-length child arrays for each possible type (less memory efficient than `DenseUnion`)
-* [`Arrow.DictEncoded`](@ref): a special array type where values are "dictionary encoded", meaning the list of unique, possible values for an array are stored internally in an "encoding pool", whereas each stored element of the array is just an integer "code" to index into the encoding pool for the actual value.
-
-And while these custom array types do subtype `AbstractArray`, there is only limited support for `setindex!`. Remember, these arrays are "views" into the raw arrow bytes, so for array types other than `Arrow.Primitive`, it gets pretty tricky to allow manipulating those raw arrow bytes. Nevertheless, it's as simple as calling `copy(x)` where `x` is any `ArrowVector` type, and a normal Julia `Vector` type will be fully materialized (which would then allow mutating/manipulating values).
-
-So, what can you do with an `Arrow.Table` full of data? Quite a bit actually!
-
-Because `Arrow.Table` implements the [Tables.jl](https://juliadata.github.io/Tables.jl/stable/) interface, it opens up a world of integrations for using arrow data. A few examples include:
-
-* `df = DataFrame(Arrow.Table(file))`: Build a [`DataFrame`](https://juliadata.github.io/DataFrames.jl/stable/), using the arrow vectors themselves; this allows utilizing a host of DataFrames.jl functionality directly on arrow data; grouping, joining, selecting, etc.
-* `Tables.datavaluerows(Arrow.Table(file)) |> @map(...) |> @filter(...) |> DataFrame`: use [`Query.jl`'s](https://www.queryverse.org/Query.jl/stable/standalonequerycommands/) row-processing utilities to map, group, filter, mutate, etc. directly over arrow data.
-* `Arrow.Table(file) |> SQLite.load!(db, "arrow_table")`: load arrow data directly into an sqlite database/table, where sql queries can be executed on the data
-* `Arrow.Table(file) |> CSV.write("arrow.csv")`: write arrow data out to a csv file
-
-A full list of Julia packages leveraging the Tables.jl interface can be found [here](https://github.com/JuliaData/Tables.jl/blob/master/INTEGRATIONS.md).
-
-Apart from letting other packages have all the fun, an `Arrow.Table` itself can be plenty useful. For example, with `tbl = Arrow.Table(file)`:
-* `tbl[1]`: retrieve the first column via indexing; the number of columns can be queried via `length(tbl)`
-* `tbl[:col1]` or `tbl.col1`: retrieve the column named `col1`, either via indexing with the column name given as a `Symbol`, or via "dot-access"
-* `for col in tbl`: iterate through columns in the table
-* `AbstractDict` methods like `haskey(tbl, :col1)`, `get(tbl, :col1, nothing)`, `keys(tbl)`, or `values(tbl)`
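-
-As a minimal sketch of these access patterns (assuming the `data.arrow` file from the reading example above, with a column named `col1`):
-
-```julia
-using Arrow
-
-tbl = Arrow.Table("data.arrow")
-
-first_col = tbl[1]     # first column by position
-col1 = tbl.col1        # column access via "dot-access"
-n_cols = length(tbl)   # number of columns
-for col in tbl         # iterate through the columns
-    println(eltype(col))
-end
-```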
-
-### Arrow types
-
-In the arrow data format, specific logical types are supported, a list of which can be found [here](https://arrow.apache.org/docs/status.html#data-types). These include booleans, integers of various bit widths, floats, decimals, time types, and binary/string. While most of these map naturally to types builtin to Julia itself, there are a few cases where the definitions are slightly different, and in these cases, by default, they are converted to more "friendly" Julia types (this auto con [...]
-
-* `Date`, `Time`, `Timestamp`, and `Duration` all have natural Julia definitions in `Dates.Date`, `Dates.Time`, `TimeZones.ZonedDateTime`, and `Dates.Period` subtypes, respectively.
-* `Char` and `Symbol` Julia types are mapped to arrow string types, with additional metadata of the original Julia type; this allows deserializing directly to `Char` and `Symbol` in Julia, while other language implementations will see these columns as just strings
-* `Decimal128` and `Decimal256` have no corresponding builtin Julia types, so they're deserialized using a compatible type definition in Arrow.jl itself: `Arrow.Decimal`
-
-Note that when `convert=false` is passed, data will be returned in Arrow.jl-defined types that exactly match the arrow definitions of those types; the authoritative source for how each type represents its data can be found in the arrow [`Schema.fbs`](https://github.com/apache/arrow/blob/master/format/Schema.fbs) file.
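-
-As a minimal sketch (assuming the same `data.arrow` file as above):
-
-```julia
-using Arrow
-
-# default: arrow types are auto-converted to "friendly" Julia types
-tbl = Arrow.Table("data.arrow")
-
-# disable the conversion; columns come back in the Arrow.jl-defined
-# types that exactly match the arrow definitions
-tbl_raw = Arrow.Table("data.arrow"; convert=false)
-```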
-
-#### Custom types
-
-To support writing your custom Julia struct, Arrow.jl utilizes the format's mechanism for "extension types" by storing
-the Julia type name in the field metadata. To "hook in" to this machinery, custom types can just call
-`Arrow.ArrowTypes.registertype!(T, T)`, where `T` is the custom struct type. For example:
-
-```julia
-using Arrow
-
-struct Person
-    id::Int
-    name::String
-end
-
-Arrow.ArrowTypes.registertype!(Person, Person)
-
-table = (col1=[Person(1, "Bob"), Person(2, "Jane")],)
-io = IOBuffer()
-Arrow.write(io, table)
-seekstart(io)
-table2 = Arrow.Table(io)
-```
-
-In this example, we're writing our `table`, which is a NamedTuple with one column named `col1`, which has two
-elements which are instances of our custom `Person` struct. We call `Arrow.ArrowTypes.registertype!` so that
-Arrow.jl knows how to serialize our `Person` struct. We then read the table back in using `Arrow.Table` and
-the table we get back will be an `Arrow.Table`, with a single `Arrow.Struct` column with element type `Person`.
-
-Note that without calling `Arrow.ArrowTypes.registertype!`, we may get into a weird limbo state where we've written
-our table with `Person` structs out as a table, but when reading back in, Arrow.jl doesn't know what a `Person` is;
-deserialization won't fail, but we'll just get a `NamedTuple{(:id, :name), Tuple{Int, String}}` back instead of `Person`.
-
-!!! warning
-
-    If `Arrow.ArrowTypes.registertype!` is called in a downstream package, e.g. to register a custom type defined in
-    that package, it must be called from the `__init__` function of the package's top-level module
-    (see the [Julia docs](https://docs.julialang.org/en/v1/manual/modules/#Module-initialization-and-precompilation)
-    for more on `__init__` functions). Otherwise, the type will only be registered during the precompilation phase,
-    but that state will be lost afterwards (and in particular, the type will not be registered when the package is loaded).
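-
-A minimal sketch of the pattern the warning describes, using a hypothetical package `MyPkg` with a custom `Coordinate` struct (the names are illustrative, not part of Arrow.jl):
-
-```julia
-module MyPkg
-
-using Arrow
-
-struct Coordinate
-    x::Float64
-    y::Float64
-end
-
-function __init__()
-    # registering here (at load time) rather than at top level ensures the
-    # registration isn't lost after the precompilation phase
-    Arrow.ArrowTypes.registertype!(Coordinate, Coordinate)
-end
-
-end # module
-```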
-
-### `Arrow.Stream`
-
-In addition to `Arrow.Table`, the Arrow.jl package also provides `Arrow.Stream` for processing arrow data. While `Arrow.Table` will iterate all record batches in an arrow file/stream, concatenating columns, `Arrow.Stream` provides a way to *iterate* through record batches, one at a time. Each iteration yields an `Arrow.Table` instance, with columns/data for a single record batch. This allows, if so desired, "batch processing" of arrow data, one record batch at a time, instead of creating [...]
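-
-As a minimal sketch (assuming an arrow file `data.arrow` with multiple record batches):
-
-```julia
-using Arrow
-
-for batch in Arrow.Stream("data.arrow")
-    # each iteration yields an Arrow.Table for a single record batch,
-    # so only one batch's columns need to be processed at a time
-    println(length(batch), " columns in this record batch")
-end
-```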
-
-### Table and column metadata
-
-The arrow format allows attaching arbitrary metadata in the form of a `Dict{String, String}` to tables and individual columns. The Arrow.jl package supports retrieving serialized metadata by calling `Arrow.getmetadata(table)` or `Arrow.getmetadata(column)`.
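-
-For example (a minimal sketch; `col1` is an assumed column name, and the result may be empty if no metadata was attached):
-
-```julia
-using Arrow
-
-tbl = Arrow.Table("data.arrow")
-table_meta = Arrow.getmetadata(tbl)       # table-level metadata
-col_meta = Arrow.getmetadata(tbl.col1)    # column-level metadata
-```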
-
-## Writing arrow data
-
-Ok, so that's a pretty good rundown of *reading* arrow data, but how do you *produce* arrow data? Enter `Arrow.write`.
-
-### `Arrow.write`
-
-With `Arrow.write`, you provide either an `io::IO` argument or `file::String` to write the arrow data to, as well as a Tables.jl-compatible source that contains the data to be written.
-
-What are some examples of Tables.jl-compatible sources? A few examples include:
-* `Arrow.write(io, df::DataFrame)`: A `DataFrame` is a collection of indexable columns
-* `Arrow.write(io, CSV.File(file))`: read data from a csv file and write out to arrow format
-* `Arrow.write(io, DBInterface.execute(db, sql_query))`: Execute an SQL query against a database via the [`DBInterface.jl`](https://github.com/JuliaDatabases/DBInterface.jl) interface, and write the query resultset out directly in the arrow format. Packages that implement DBInterface include [SQLite.jl](https://juliadatabases.github.io/SQLite.jl/stable/), [MySQL.jl](https://juliadatabases.github.io/MySQL.jl/dev/), and [ODBC.jl](http://juliadatabases.github.io/ODBC.jl/latest/). 
-* `df |> @map(...) |> Arrow.write(io)`: Write the results of a [Query.jl](https://www.queryverse.org/Query.jl/stable/) chain of operations directly out as arrow data
-* `jsontable(json) |> Arrow.write(io)`: Treat a json array of objects or object of arrays as a "table" and write it out as arrow data using the [JSONTables.jl](https://github.com/JuliaData/JSONTables.jl) package
-* `Arrow.write(io, (col1=data1, col2=data2, ...))`: a `NamedTuple` of `AbstractVector`s or an `AbstractVector` of `NamedTuple`s are both considered tables by default, so they can be quickly constructed for easy writing of arrow data if you already have columns of data
-
-And these are just a few examples of the numerous [integrations](https://github.com/JuliaData/Tables.jl/blob/master/INTEGRATIONS.md).
-
-In addition to just writing out a single "table" of data as a single arrow record batch, `Arrow.write` also supports writing out multiple record batches when the input supports the `Tables.partitions` functionality. One immediate, though perhaps not incredibly useful, example is `Arrow.Stream`. `Arrow.Stream` implements `Tables.partitions` in that it iterates "tables" (specifically `Arrow.Table`), and as such, `Arrow.write` will iterate an `Arrow.Stream`, and write out each `Arrow.Table` [...]
-
-In addition to inputs that support `Tables.partitions`, note that Tables.jl itself provides the `Tables.partitioner` function, which allows providing your own separate instances of tables with the same schema as "partitions", like:
-
-```julia
-# treat 2 separate NamedTuples of vectors with same schema as 1 table, 2 partitions
-tbl_parts = Tables.partitioner([(col1=data1, col2=data2), (col1=data3, col2=data4)])
-Arrow.write(io, tbl_parts)
-
-# treat an array of csv files with same schema where each file is a partition
-# in this form, a function `CSV.File` is applied to each element of 2nd argument
-csv_parts = Tables.partitioner(CSV.File, csv_files)
-Arrow.write(io, csv_parts)
-```
-
-### Multithreaded writing
-
-By default, `Arrow.write` will use multiple threads to write multiple
-record batches simultaneously (e.g. if julia is started with `julia -t 8` or the `JULIA_NUM_THREADS` environment variable is set).
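-
-A minimal sketch (the file name and columns are illustrative); in a multi-threaded session, the two partitions below can be written as record batches concurrently:
-
-```julia
-using Arrow, Tables
-
-@show Threads.nthreads()  # > 1 when julia is started with e.g. `julia -t 8`
-
-parts = Tables.partitioner([(col1=rand(10),), (col1=rand(10),)])
-Arrow.write("multi.arrow", parts)  # each partition becomes a record batch
-```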
-
-### Compression
-
-Compression is supported when writing via the `compress` keyword argument. Possible values include `:lz4`, `:zstd`, or your own initialized `LZ4FrameCompressor` or `ZstdCompressor` objects; any of these causes all buffers in each record batch to be written with the corresponding compression encoding or compressor.
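-
-As a minimal sketch (file name and columns are illustrative):
-
-```julia
-using Arrow
-
-tbl = (col1=collect(1:1000), col2=rand(1000))
-Arrow.write("compressed.arrow", tbl; compress=:zstd)  # or compress=:lz4
-```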
diff --git a/julia/Arrow/docs/src/reference.md b/julia/Arrow/docs/src/reference.md
deleted file mode 100644
index 9130082..0000000
--- a/julia/Arrow/docs/src/reference.md
+++ /dev/null
@@ -1,6 +0,0 @@
-# API Reference
-
-```@autodocs
-Modules = [Arrow]
-Order   = [:type, :function]
-```
\ No newline at end of file
diff --git a/julia/Arrow/src/Arrow.jl b/julia/Arrow/src/Arrow.jl
deleted file mode 100644
index 5472b15..0000000
--- a/julia/Arrow/src/Arrow.jl
+++ /dev/null
@@ -1,107 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.jl
-
-A pure Julia implementation of the [apache arrow](https://arrow.apache.org/) memory format specification.
-
-This implementation supports the 1.0 version of the specification, including support for:
-  * All primitive data types
-  * All nested data types
-  * Dictionary encodings, nested dictionary encodings, and messages
-  * Extension types
-  * Streaming, file, record batch, and replacement and isdelta dictionary messages
-  * Buffer compression/decompression via the standard LZ4 frame and Zstd formats
-
-It currently doesn't include support for:
-  * Tensors or sparse tensors
-  * Flight RPC
-  * C data interface
-
-Third-party data formats:
-  * csv and parquet support via the existing [CSV.jl](https://github.com/JuliaData/CSV.jl) and [Parquet.jl](https://github.com/JuliaIO/Parquet.jl) packages
-  * Other [Tables.jl](https://github.com/JuliaData/Tables.jl)-compatible packages automatically supported ([DataFrames.jl](https://github.com/JuliaData/DataFrames.jl), [JSONTables.jl](https://github.com/JuliaData/JSONTables.jl), [JuliaDB.jl](https://github.com/JuliaData/JuliaDB.jl), [SQLite.jl](https://github.com/JuliaDatabases/SQLite.jl), [MySQL.jl](https://github.com/JuliaDatabases/MySQL.jl), [JDBC.jl](https://github.com/JuliaDatabases/JDBC.jl), [ODBC.jl](https://github.com/JuliaDataba [...]
-  * No current Julia packages support ORC or Avro data formats
-
-See docs for official Arrow.jl API with the [User Manual](@ref) and reference docs for [`Arrow.Table`](@ref), [`Arrow.write`](@ref), and [`Arrow.Stream`](@ref).
-"""
-module Arrow
-
-using Mmap
-import Dates
-using DataAPI, Tables, SentinelArrays, PooledArrays, CodecLz4, CodecZstd, TimeZones, BitIntegers
-
-using Base: @propagate_inbounds
-import Base: ==
-
-const DEBUG_LEVEL = Ref(0)
-
-function setdebug!(level::Int)
-    DEBUG_LEVEL[] = level
-    return
-end
-
-function withdebug(f, level)
-    lvl = DEBUG_LEVEL[]
-    try
-        setdebug!(level)
-        f()
-    finally
-        setdebug!(lvl)
-    end
-end
-
-macro debug(level, msg)
-    esc(quote
-        if DEBUG_LEVEL[] >= $level
-            println(string("DEBUG: ", $(QuoteNode(__source__.file)), ":", $(QuoteNode(__source__.line)), " ", $msg))
-        end
-    end)
-end
-
-const FILE_FORMAT_MAGIC_BYTES = b"ARROW1"
-const CONTINUATION_INDICATOR_BYTES = 0xffffffff
-
-# vendored flatbuffers code for now
-include("FlatBuffers/FlatBuffers.jl")
-using .FlatBuffers
-
-include("metadata/Flatbuf.jl")
-using .Flatbuf; const Meta = Flatbuf
-
-include("arrowtypes.jl")
-using .ArrowTypes
-include("utils.jl")
-include("arraytypes/arraytypes.jl")
-include("eltypes.jl")
-include("table.jl")
-include("write.jl")
-
-const LZ4_FRAME_COMPRESSOR = Ref{LZ4FrameCompressor}()
-const ZSTD_COMPRESSOR = Ref{ZstdCompressor}()
-
-function __init__()
-    zstd = ZstdCompressor(; level=3)
-    CodecZstd.TranscodingStreams.initialize(zstd)
-    ZSTD_COMPRESSOR[] = zstd
-    lz4 = LZ4FrameCompressor(; compressionlevel=4)
-    CodecLz4.TranscodingStreams.initialize(lz4)
-    LZ4_FRAME_COMPRESSOR[] = lz4
-    return
-end
-
-end  # module Arrow
diff --git a/julia/Arrow/src/FlatBuffers/FlatBuffers.jl b/julia/Arrow/src/FlatBuffers/FlatBuffers.jl
deleted file mode 100644
index f4c7477..0000000
--- a/julia/Arrow/src/FlatBuffers/FlatBuffers.jl
+++ /dev/null
@@ -1,153 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-module FlatBuffers
-
-const UOffsetT = UInt32
-const SOffsetT = Int32
-const VOffsetT = UInt16
-const VtableMetadataFields = 2
-
-basetype(::Enum) = UInt8
-
-function readbuffer(t::AbstractVector{UInt8}, pos::Integer, ::Type{Bool})
-    @inbounds b = t[pos + 1]
-    return b === 0x01
-end
-
-function readbuffer(t::AbstractVector{UInt8}, pos::Integer, ::Type{T}) where {T}
-    GC.@preserve t begin
-        ptr = convert(Ptr{T}, pointer(t, pos + 1))
-        x = unsafe_load(ptr)
-    end
-end
-
-include("builder.jl")
-include("table.jl")
-
-function Base.show(io::IO, x::TableOrStruct)
-    print(io, "$(typeof(x))")
-    if isempty(propertynames(x))
-        print(io, "()")
-    else
-        show(io, NamedTuple{propertynames(x)}(Tuple(getproperty(x, y) for y in propertynames(x))))
-    end
-end
-
-abstract type ScopedEnum{T<:Integer} <: Enum{T} end
-
-macro scopedenum(T, syms...)
-    if isempty(syms)
-        throw(ArgumentError("no arguments given for ScopedEnum $T"))
-    end
-    basetype = Int32
-    typename = T
-    if isa(T, Expr) && T.head === :(::) && length(T.args) == 2 && isa(T.args[1], Symbol)
-        typename = T.args[1]
-        basetype = Core.eval(__module__, T.args[2])
-        if !isa(basetype, DataType) || !(basetype <: Integer) || !isbitstype(basetype)
-            throw(ArgumentError("invalid base type for ScopedEnum $typename, $T=::$basetype; base type must be an integer primitive type"))
-        end
-    elseif !isa(T, Symbol)
-        throw(ArgumentError("invalid type expression for ScopedEnum $T"))
-    end
-    values = basetype[]
-    seen = Set{Symbol}()
-    namemap = Dict{basetype,Symbol}()
-    lo = hi = 0
-    i = zero(basetype)
-    hasexpr = false
-
-    if length(syms) == 1 && syms[1] isa Expr && syms[1].head === :block
-        syms = syms[1].args
-    end
-    for s in syms
-        s isa LineNumberNode && continue
-        if isa(s, Symbol)
-            if i == typemin(basetype) && !isempty(values)
-                throw(ArgumentError("overflow in value \"$s\" of ScopedEnum $typename"))
-            end
-        elseif isa(s, Expr) &&
-               (s.head === :(=) || s.head === :kw) &&
-               length(s.args) == 2 && isa(s.args[1], Symbol)
-            i = Core.eval(__module__, s.args[2]) # allow exprs, e.g. uint128"1"
-            if !isa(i, Integer)
-                throw(ArgumentError("invalid value for ScopedEnum $typename, $s; values must be integers"))
-            end
-            i = convert(basetype, i)
-            s = s.args[1]
-            hasexpr = true
-        else
-            throw(ArgumentError(string("invalid argument for ScopedEnum ", typename, ": ", s)))
-        end
-        if !Base.isidentifier(s)
-            throw(ArgumentError("invalid name for ScopedEnum $typename; \"$s\" is not a valid identifier"))
-        end
-        if hasexpr && haskey(namemap, i)
-            throw(ArgumentError("both $s and $(namemap[i]) have value $i in ScopedEnum $typename; values must be unique"))
-        end
-        namemap[i] = s
-        push!(values, i)
-        if s in seen
-            throw(ArgumentError("name \"$s\" in ScopedEnum $typename is not unique"))
-        end
-        push!(seen, s)
-        if length(values) == 1
-            lo = hi = i
-        else
-            lo = min(lo, i)
-            hi = max(hi, i)
-        end
-        i += oneunit(i)
-    end
-    defs = Expr(:block)
-    if isa(typename, Symbol)
-        for (i, sym) in namemap
-            push!(defs.args, :(const $(esc(sym)) = $(esc(typename))($i)))
-        end
-    end
-    mod = Symbol(typename, "Module")
-    syms = Tuple(Base.values(namemap))
-    blk = quote
-        module $(esc(mod))
-            export $(esc(typename))
-            # enum definition
-            primitive type $(esc(typename)) <: ScopedEnum{$(basetype)} $(sizeof(basetype) * 8) end
-            function $(esc(typename))(x::Integer)
-                $(Base.Enums.membershiptest(:x, values)) || Base.Enums.enum_argument_error($(Expr(:quote, typename)), x)
-                return Core.bitcast($(esc(typename)), convert($(basetype), x))
-            end
-            if isdefined(Base.Enums, :namemap)
-                Base.Enums.namemap(::Type{$(esc(typename))}) = $(esc(namemap))
-            end
-            Base.getproperty(::Type{$(esc(typename))}, sym::Symbol) = sym in $syms ? getfield($(esc(mod)), sym) : getfield($(esc(typename)), sym)
-            Base.typemin(x::Type{$(esc(typename))}) = $(esc(typename))($lo)
-            Base.typemax(x::Type{$(esc(typename))}) = $(esc(typename))($hi)
-            let insts = (Any[ $(esc(typename))(v) for v in $values ]...,)
-                Base.instances(::Type{$(esc(typename))}) = insts
-            end
-            FlatBuffers.basetype(::$(esc(typename))) = $(basetype)
-            FlatBuffers.basetype(::Type{$(esc(typename))}) = $(basetype)
-            $defs
-        end
-    end
-    push!(blk.args, :nothing)
-    blk.head = :toplevel
-    push!(blk.args, :(using .$mod))
-    return blk
-end
-
-end # module
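
A usage sketch for the `@scopedenum` macro above (the enum name and values
mirror `Meta.CompressionType` used elsewhere in this diff; treat the exact
members as illustrative). Unlike `Base.@enum`, members are accessed through
the type, so different enums can reuse member names:

    FlatBuffers.@scopedenum CompressionType::Int32 LZ4_FRAME=0 ZSTD=1

    c = CompressionType.ZSTD     # members are scoped to the type
    Int32(c) == 1                # backed by the declared base type
    instances(CompressionType)   # (CompressionType.LZ4_FRAME, CompressionType.ZSTD)
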
diff --git a/julia/Arrow/src/FlatBuffers/builder.jl b/julia/Arrow/src/FlatBuffers/builder.jl
deleted file mode 100644
index 0c65c6f..0000000
--- a/julia/Arrow/src/FlatBuffers/builder.jl
+++ /dev/null
@@ -1,440 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-const fileIdentifierLength = 4
-
-"""
-Scalar
-A Union of the Julia types `T <: Number` that are allowed in FlatBuffers schema
-"""
-const Scalar = Union{Bool,
-Int8, Int16, Int32, Int64,
-UInt8, UInt16, UInt32, UInt64,
-Float32, Float64, Enum}
-
-"""
-Builder is a state machine for creating FlatBuffer objects.
-Use a Builder to construct object(s) starting from leaf nodes.
-
-A Builder constructs byte buffers in a last-first manner for simplicity and
-performance.
-"""
-mutable struct Builder
-    bytes::Vector{UInt8}
-    minalign::Int
-    vtable::Vector{UOffsetT}
-    objectend::UOffsetT
-    vtables::Vector{UOffsetT}
-    head::UOffsetT
-    nested::Bool
-    finished::Bool
-    sharedstrings::Dict{String, UOffsetT}
-end
-
-bytes(b::Builder) = getfield(b, :bytes)
-
-Builder(size=0) = Builder(zeros(UInt8, size), 1, UOffsetT[], UOffsetT(0), UOffsetT[], UOffsetT(size), false, false, Dict{String, UOffsetT}())
-
-function reset!(b::Builder)
-    empty!(b.bytes)
-    empty!(b.vtable)
-    empty!(b.vtables)
-    empty!(b.sharedstrings)
-    b.minalign = 1
-    b.nested = false
-    b.finished = false
-    b.head = 0
-    return
-end
-
-Base.write(sink::Builder, o, x::Union{Bool,UInt8}) = sink.bytes[o+1] = UInt8(x)
-function Base.write(sink::Builder, off, x::T) where {T}
-    off += 1
-    for (i, ind) = enumerate(off:(off + sizeof(T) - 1))
-        sink.bytes[ind] = (x >> ((i-1) * 8)) % UInt8
-    end
-end
-Base.write(b::Builder, o, x::Float32) = write(b, o, reinterpret(UInt32, x))
-Base.write(b::Builder, o, x::Float64) = write(b, o, reinterpret(UInt64, x))
-Base.write(b::Builder, o, x::Enum) = write(b, o, basetype(x)(x))
-
-"""
-`finishedbytes` returns a view of the written data in the byte buffer.
-Throws an error if the builder is not in a finished state (a builder is
-finished by calling `finish!`).
-"""
-function finishedbytes(b::Builder)
-    assertfinished(b)
-    return view(b.bytes, (b.head + 1):length(b.bytes))
-end
-
-function startobject!(b::Builder, numfields)
-    assertnotnested(b)
-    b.nested = true
-    resize!(b.vtable, numfields)
-    fill!(b.vtable, 0)
-    b.objectend = offset(b)
-    return
-end
-
-"""
-`writevtable!` serializes the vtable for the current object, if applicable.
-
-Before writing out the vtable, this checks pre-existing vtables for equality
-to this one. If an equal vtable is found, point the object to the existing
-vtable and return.
-
-Because vtable values are sensitive to alignment of object data, not all
-logically-equal vtables will be deduplicated.
-
-A vtable has the following format:
-<VOffsetT: size of the vtable in bytes, including this value>
-<VOffsetT: size of the object in bytes, including the vtable offset>
-<VOffsetT: offset for a field> * N, where N is the number of fields in
-the schema for this type. Includes deprecated fields.
-Thus, a vtable is made of 2 + N elements, each `sizeof(VOffsetT)` bytes wide.
-
-An object has the following format:
-<SOffsetT: offset to this object's vtable (may be negative)>
-<byte: data>+
-"""
-function writevtable!(b::Builder)
-    # Prepend a zero scalar to the object. Later in this function we'll
-    # write an offset here that points to the object's vtable:
-    prepend!(b, SOffsetT(0))
-
-    objectOffset = offset(b)
-    existingVtable = UOffsetT(0)
-
-    # Trim vtable of trailing zeroes.
-    i = findlast(!iszero, b.vtable)
-    if i !== nothing
-        resize!(b.vtable, i)
-    end
-
-    # Search backwards through existing vtables, because similar vtables
-    # are likely to have been recently appended. See
-    # BenchmarkVtableDeduplication for a case in which this heuristic
-    # saves about 30% of the time used in writing objects with duplicate
-    # tables.
-    for i = length(b.vtables):-1:1
-        # Find the other vtable, which is associated with `i`:
-        vt2Offset = b.vtables[i]
-        vt2Start = length(b.bytes) - vt2Offset
-        vt2Len = readbuffer(b.bytes, vt2Start, VOffsetT)
-
-        metadata = VtableMetadataFields * sizeof(VOffsetT)
-        vt2End = vt2Start + vt2Len
-        vt2 = view(b.bytes, (vt2Start + metadata + 1):vt2End) #TODO: might need a +1 on the start of range here
-
-        # Compare the other vtable to the one under consideration.
-        # If they are equal, store the offset and break:
-        if vtableEqual(b.vtable, objectOffset, vt2)
-            existingVtable = vt2Offset
-            break
-        end
-    end
-
-    if existingVtable == 0
-        # Did not find a vtable, so write this one to the buffer.
-
-        # Write out the current vtable in reverse, because
-        # serialization occurs in last-first order:
-        for i = length(b.vtable):-1:1
-            off::UOffsetT = 0
-            if b.vtable[i] != 0
-                # Forward reference to field;
-                # use a 32-bit number to assert no overflow:
-                off = objectOffset - b.vtable[i]
-            end
-            prepend!(b, VOffsetT(off))
-        end
-
-        # The two metadata fields are written last.
-
-        # First, store the object bytesize:
-        objectSize = objectOffset - b.objectend
-        prepend!(b, VOffsetT(objectSize))
-
-        # Second, store the vtable bytesize:
-        vbytes = (length(b.vtable) + VtableMetadataFields) * sizeof(VOffsetT)
-        prepend!(b, VOffsetT(vbytes))
-
-        # Next, write the offset to the new vtable in the
-        # already-allocated SOffsetT at the beginning of this object:
-        objectStart = SOffsetT(length(b.bytes) - objectOffset)
-        write(b, objectStart, SOffsetT(offset(b) - objectOffset))
-
-        # Finally, store this vtable in memory for future
-        # deduplication:
-        push!(b.vtables, offset(b))
-    else
-        # Found a duplicate vtable.
-
-        objectStart = SOffsetT(length(b.bytes) - objectOffset)
-        b.head = objectStart
-
-        # Write the offset to the found vtable in the
-        # already-allocated SOffsetT at the beginning of this object:
-        write(b, b.head, SOffsetT(existingVtable) - SOffsetT(objectOffset))
-    end
-
-    empty!(b.vtable)
-    return objectOffset
-end
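
As a concrete illustration of the vtable format described above (sizes worked
out for a hypothetical two-field object with one 4-byte field written and the
second field left at its default):

    # vtable, written last-first:
    #   VOffsetT(8)    vtable size: (2 metadata + 2 field) entries * 2 bytes
    #   VOffsetT(8)    object size: SOffsetT (4 bytes) + 4 bytes of field data
    #   VOffsetT(4)    field 0: stored 4 bytes past the object start
    #   VOffsetT(0)    field 1: zero => default value, not stored
    # object:
    #   SOffsetT(..)   signed offset back to the vtable
    #   <4 bytes>      field 0 data
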
-
-"""
-`endobject` writes data necessary to finish object construction.
-"""
-function endobject!(b::Builder)
-    assertnested(b)
-    n = writevtable!(b)
-    b.nested = false
-    return n
-end
-
-offset(b::Builder) = UOffsetT(length(b.bytes) - b.head)
-
-pad!(b::Builder, n) = foreach(x->place!(b, 0x00), 1:n)
-
-"""
-`prep!` prepares to write an element of `size` after `additionalbytes`
-have been written, e.g. if you write a string, you need to align such that
-the `Int32` length field is aligned to `sizeof(Int32)` and the string data
-follows it directly.
-If all you need to do is align, `additionalbytes` will be 0.
-"""
-function prep!(b::Builder, size, additionalbytes)
-    # Track the biggest thing we've ever aligned to.
-    if size > b.minalign
-        b.minalign = size
-    end
-    # Find the amount of alignment needed such that `size` is properly
-    # aligned after `additionalBytes`:
-    alignsize = xor(Int(-1), (length(b.bytes) - b.head) + additionalbytes) + 1
-    alignsize &= (size - 1)
-
-    # Reallocate the buffer if needed:
-    totalsize = alignsize + size + additionalbytes
-    if b.head <= totalsize
-        len = length(b.bytes)
-        prepend!(b.bytes, zeros(UInt8, totalsize))
-        b.head += length(b.bytes) - len
-    end
-    pad!(b, alignsize)
-    return
-end
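
The `alignsize` expression above is two's-complement negation
(`xor(-1, n) + 1 == -n`) masked to `size - 1`, i.e. the padding needed to
round the written length up to a multiple of `size`. Checking the arithmetic
with hypothetical numbers:

    n = 5                                        # bytes written so far
    sz = 4                                       # alignment target
    alignsize = (xor(-1, n) + 1) & (sz - 1)      # (-5) & 3 == 3
    @assert (n + alignsize) % sz == 0            # 5 + 3 == 8, a multiple of 4
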
-
-function Base.prepend!(b::Builder, x::T) where {T}
-    prep!(b, sizeof(T), 0)
-    place!(b, x)
-    return
-end
-
-function prependoffset!(b::Builder, off)
-    prep!(b, sizeof(Int32), 0) # Ensure alignment is already done.
-    if !(off <= offset(b))
-        throw(ArgumentError("unreachable: $off <= $(offset(b))"))
-    end
-    place!(b, SOffsetT(offset(b) - off + sizeof(SOffsetT)))
-    return
-end
-
-function prependoffsetslot!(b::Builder, o::Int, x::T, d) where {T}
-    if x != T(d)
-        prependoffset!(b, x)
-        slot!(b, o)
-    end
-    return
-end
-
-"""
-`startvector` initializes bookkeeping for writing a new vector.
-
-A vector has the following format:
-<UOffsetT: number of elements in this vector>
-<T: data>+, where T is the type of elements of this vector.
-"""
-function startvector!(b::Builder, elemSize, numElems, alignment)
-    assertnotnested(b)
-    b.nested = true
-    prep!(b, sizeof(UInt32), elemSize * numElems)
-    prep!(b, alignment, elemSize * numElems)
-    return offset(b)
-end
-
-"""
-`endvector` writes data necessary to finish vector construction.
-"""
-function endvector!(b::Builder, vectorNumElems)
-    assertnested(b)
-    place!(b, UOffsetT(vectorNumElems))
-    b.nested = false
-    return offset(b)
-end
-
-function createsharedstring!(b::Builder, s::AbstractString)
-    get!(b.sharedstrings, s) do
-        createstring!(b, s)
-    end
-end
-
-"""
-`createstring!` writes a null-terminated string as a vector.
-"""
-function createstring!(b::Builder, s::Union{AbstractString, AbstractVector{UInt8}})
-    assertnotnested(b)
-    b.nested = true
-    s = codeunits(s)
-    prep!(b, sizeof(UInt32), sizeof(s) + 1)
-    place!(b, UInt8(0))
-
-    l = sizeof(s)
-
-    b.head -= l
-    copyto!(b.bytes, b.head+1, s, 1, l)
-    return endvector!(b, sizeof(s))
-end
-
-createbytevector(b::Builder, v) = createstring!(b, v)
-
-function assertnested(b::Builder)
-    # If you get this assert, you're in an object while trying to write
-    # data that belongs outside of an object.
-    # To fix this, write non-inline data (like vectors) before creating
-    # objects.
-    if !b.nested
-        throw(ArgumentError("Incorrect creation order: must be inside object."))
-    end
-    return
-end
-
-function assertnotnested(b::Builder)
-    # If you hit this, you're trying to construct a Table/Vector/String
-    # during the construction of its parent table (between `startobject!`
-    # and `endobject!`).
-    # Move the creation of these view-objects to before `startobject!` to
-    # avoid this assert.
-    # Ignoring this assert may appear to work in simple cases, but the reason
-    # it is here is that storing objects in-line may cause vtable offsets
-    # to not fit anymore. It also leads to vtable duplication.
-    if b.nested
-        throw(ArgumentError("Incorrect creation order: object must not be nested."))
-    end
-    return
-end
-
-function assertfinished(b::Builder)
-    # If you get this assert, you're attempting to access a buffer
-    # which hasn't been finished yet. Be sure to call `finish!`
-    # with your root table.
-    # If you really need to access an unfinished buffer, use the bytes
-    # buffer directly.
-    if !b.finished
-        throw(ArgumentError("Incorrect use of finishedbytes: must call finish! first."))
-    end
-    end
-end
-
-"""
-`prependslot!` prepends a `T` onto the object at vtable slot `o`.
-If value `x` equals default `d`, then the slot will be set to zero and no
-other data will be written.
-"""
-function prependslot!(b::Builder, o::Int, x::T, d, sh=false) where {T <: Scalar}
-    if x != T(d)
-        prepend!(b, x)
-        slot!(b, o)
-    end
-    return
-end
-
-"""
-`prependstructslot!` prepends a struct onto the object at vtable slot `o`.
-Structs are stored inline, so nothing additional is being added.
-In generated code, `d` is always 0.
-"""
-function prependstructslot!(b::Builder, voffset, x, d)
-    if x != d
-        assertnested(b)
-        if x != offset(b)
-            throw(ArgumentError("inline data write outside of object"))
-        end
-        slot!(b, voffset)
-    end
-    return
-end
-
-"""
-`slot!` sets the vtable key `voffset` to the current location in the buffer.
-"""
-function slot!(b::Builder, slotnum)
-    b.vtable[slotnum + 1] = offset(b)
-end
-
-# `finishwithfileidentifier` finalizes a buffer, pointing to the given `rootTable`,
-# as well as applying a file identifier.
-function finishwithfileidentifier(b::Builder, rootTable, fid)
-    if length(fid) != fileIdentifierLength
-        error("incorrect file identifier length")
-    end
-    # To add a file identifier to the flatbuffer message, first align for
-    # the root offset plus the file identifier length
-    prep!(b, b.minalign, sizeof(Int32) + fileIdentifierLength)
-    for i = fileIdentifierLength:-1:1
-        # place the file identifier
-        place!(b, fid[i])
-    end
-    # finish
-    finish!(b, rootTable)
-end
-
-"""
-`finish!` finalizes a buffer, pointing to the given `rootTable`.
-"""
-function finish!(b::Builder, rootTable)
-    assertnotnested(b)
-    prep!(b, b.minalign, sizeof(UOffsetT))
-    prependoffset!(b, UOffsetT(rootTable))
-    b.finished = true
-    return
-end
-
-"vtableEqual compares an unwritten vtable to a written vtable."
-function vtableEqual(a::Vector{UOffsetT}, objectStart, b::AbstractVector{UInt8})
-    if length(a) * sizeof(VOffsetT) != length(b)
-        return false
-    end
-
-    for i = 0:(length(a)-1)
-        x = read(IOBuffer(view(b, (i * sizeof(VOffsetT) + 1):length(b))), VOffsetT)
-
-        # Skip vtable entries that indicate a default value.
-        x == 0 && a[i+1] == 0 && continue
-
-        y = objectStart - a[i+1]
-        x != y && return false
-    end
-    return true
-end
-
-"""
-`place!` prepends a `T` to the Builder, without checking for space.
-"""
-function place!(b::Builder, x::T) where {T}
-    b.head -= sizeof(T)
-    write(b, b.head, x)
-    return
-end
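
Putting this builder's pieces together, a hand-rolled sketch of serializing a
one-field table (no generated code; the slot number and string value are
hypothetical). Non-inline data must be created before `startobject!`, per the
assertions above:

    b = FlatBuffers.Builder(32)
    s = FlatBuffers.createstring!(b, "hello")    # non-inline data first
    FlatBuffers.startobject!(b, 1)               # table with 1 vtable slot
    FlatBuffers.prependoffsetslot!(b, 0, s, 0)   # store the string in slot 0
    root = FlatBuffers.endobject!(b)
    FlatBuffers.finish!(b, root)
    buf = FlatBuffers.finishedbytes(b)           # view of the serialized bytes
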
diff --git a/julia/Arrow/src/FlatBuffers/table.jl b/julia/Arrow/src/FlatBuffers/table.jl
deleted file mode 100644
index bb11a8f..0000000
--- a/julia/Arrow/src/FlatBuffers/table.jl
+++ /dev/null
@@ -1,170 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Table
-
-The object containing the flatbuffer and positional information specific to the table.
-The vtable, which holds the offsets of the table's members, precedes `pos`;
-the member values themselves are stored at offsets relative to `pos`.
-
-- `bytes::Vector{UInt8}`: the flatbuffer itself
-- `pos::Integer`: the base position in `bytes` of the table
-"""
-abstract type Table end
-abstract type Struct end
-
-const TableOrStruct = Union{Table, Struct}
-
-bytes(x::TableOrStruct) = getfield(x, :bytes)
-pos(x::TableOrStruct) = getfield(x, :pos)
-
-(::Type{T})(b::Builder) where {T <: TableOrStruct} = T(b.bytes[b.head+1:end], get(b, b.head, Int32))
-
-getrootas(::Type{T}, bytes::Vector{UInt8}, offset) where {T <: Table} = init(T, bytes, offset + readbuffer(bytes, offset, UOffsetT))
-init(::Type{T}, bytes::Vector{UInt8}, pos::Integer) where {T <: TableOrStruct} = T(bytes, pos)
-
-const TableOrBuilder = Union{Table, Struct, Builder}
-
-Base.get(t::TableOrBuilder, pos, ::Type{T}) where {T} = readbuffer(bytes(t), pos, T)
-Base.get(t::TableOrBuilder, pos, ::Type{T}) where {T <: Enum} = T(get(t, pos, basetype(T)))
-
-"""
-`offset` provides access into the Table's vtable.
-
-Deprecated fields are ignored by checking against the vtable's length.
-"""
-function offset(t::Table, vtableoffset)
-    vtable = pos(t) - get(t, pos(t), SOffsetT)
-    return vtableoffset < get(t, vtable, VOffsetT) ? get(t, vtable + vtableoffset, VOffsetT) : VOffsetT(0)
-end
-
-"`indirect` retrieves the relative offset stored at `offset`."
-indirect(t::Table, off) = off + get(t, off, UOffsetT)
-
-getvalue(t, o, ::Type{Nothing}) = nothing
-getvalue(t, o, ::Type{T}) where {T <: Scalar} = get(t, pos(t) + o, T)
-getvalue(t, o, ::Type{T}) where {T <: Enum} = T(get(t, pos(t) + o, enumtype(T)))
-
-function Base.String(t::Table, off)
-    off += get(t, off, UOffsetT)
-    start = off + sizeof(UOffsetT)
-    len = get(t, off, UOffsetT)
-    return unsafe_string(pointer(bytes(t), start + 1), len)
-end
-
-function bytevector(t::Table, off)
-    off += get(t, off, UOffsetT)
-    start = off + sizeof(UOffsetT)
-    len = get(t, off, UOffsetT)
-    return view(bytes(t), (start + 1):(start + len + 1))
-end
-
-"""
-`vectorlen` retrieves the length of the vector whose offset is stored at
-`off` in this object.
-"""
-function vectorlen(t::Table, off)
-    off += pos(t)
-    off += get(t, off, UOffsetT)
-    return Int(get(t, off, UOffsetT))
-end
-
-"""
-`vector` retrieves the start of data of the vector whose offset is stored
-at `off` in this object.
-"""
-function vector(t::Table, off)
-    off += pos(t)
-    x = off + get(t, off, UOffsetT)
-    # data starts after metadata containing the vector length
-    return x + sizeof(UOffsetT)
-end
-
-struct Array{T, S, TT} <: AbstractVector{T}
-    _tab::TT
-    pos::Int64
-    data::Vector{S}
-end
-
-function Array{T}(t::Table, off) where {T}
-    a = vector(t, off)
-    S = T <: Table ? UOffsetT : T <: Struct ? NTuple{structsizeof(T), UInt8} : T
-    ptr = convert(Ptr{S}, pointer(bytes(t), pos(t) + a + 1))
-    data = unsafe_wrap(Base.Array, ptr, vectorlen(t, off))
-    return Array{T, S, typeof(t)}(t, a, data)
-end
-
-function structsizeof end
-
-Base.IndexStyle(::Type{<:Array}) = Base.IndexLinear()
-Base.size(x::Array) = size(x.data)
-Base.@propagate_inbounds function Base.getindex(A::Array{T, S}, i::Integer) where {T, S}
-    if T === S
-        return A.data[i]
-    elseif T <: Struct
-        return init(T, bytes(A._tab), A.pos + (i - 1) * structsizeof(T))
-    else # T isa Table
-        return init(T, bytes(A._tab), indirect(A._tab, A.pos + (i - 1) * 4))
-    end
-end
-
-Base.@propagate_inbounds function Base.setindex!(A::Array{T, S}, v, i::Integer) where {T, S}
-    if T === S
-        return setindex!(A.data, v, i)
-    else
-        error("setindex! not supported for reference/table types")
-    end
-end
-
-function union(t::Table, off)
-    off += pos(t)
-    return off + get(t, off, UOffsetT)
-end
-
-function union!(t::Table, t2::Table, off)
-    off += pos(t)
-    t2.pos = off + get(t, off, UOffsetT)
-    t2.bytes = bytes(t)
-    return
-end
-
-"""
-`getoffsetslot` retrieves the `VOffsetT` that the given vtable location
-points to. If the vtable value is zero, the default value `d`
-will be returned.
-"""
-function getoffsetslot(t::Table, slot, d)
-    off = offset(t, slot)
-    if off == 0
-        return d
-    end
-    return off
-end
-
-"""
-`getslot` retrieves the `T` that the given vtable location
-points to. If the vtable value is zero, the default value `d`
-will be returned.
-"""
-function getslot(t::Table, slot, d::T) where {T}
-    off = offset(t, slot)
-    if off == 0
-        return d
-    end
-
-    return get(t, pos(t) + off, T)
-end
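
These accessors combine in generated code roughly as follows (a sketch;
`MyTable` and its field layout are hypothetical). By FlatBuffers convention,
field `i`'s vtable entry sits `4 + 2i` bytes past the vtable start, hence
`offset(t, 4)` for field 0:

    struct MyTable <: FlatBuffers.Table
        bytes::Vector{UInt8}
        pos::Int64
    end

    function name(t::MyTable)
        o = FlatBuffers.offset(t, 4)   # vtable entry for field 0
        o == 0 && return ""            # field absent => default value
        return String(t, o + FlatBuffers.pos(t))
    end
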
diff --git a/julia/Arrow/src/arraytypes/arraytypes.jl b/julia/Arrow/src/arraytypes/arraytypes.jl
deleted file mode 100644
index ee57ebc..0000000
--- a/julia/Arrow/src/arraytypes/arraytypes.jl
+++ /dev/null
@@ -1,190 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.ArrowVector
-
-An abstract type that subtypes `AbstractVector`. Each specific arrow array type
-subtypes `ArrowVector`. See [`BoolVector`](@ref), [`Primitive`](@ref), [`List`](@ref),
-[`Map`](@ref), [`FixedSizeList`](@ref), [`Struct`](@ref), [`DenseUnion`](@ref),
-[`SparseUnion`](@ref), and [`DictEncoded`](@ref) for more details.
-"""
-abstract type ArrowVector{T} <: AbstractVector{T} end
-
-Base.IndexStyle(::Type{A}) where {A <: ArrowVector} = Base.IndexLinear()
-Base.similar(::Type{A}, dims::Dims) where {T, A <: ArrowVector{T}} = Vector{T}(undef, dims)
-validitybitmap(x::ArrowVector) = x.validity
-nullcount(x::ArrowVector) = validitybitmap(x).nc
-getmetadata(x::ArrowVector) = x.metadata
-
-function toarrowvector(x, i=1, de=Dict{Int64, Any}(), ded=DictEncoding[], meta=getmetadata(x); compression::Union{Nothing, LZ4FrameCompressor, ZstdCompressor}=nothing, kw...)
-    @debug 2 "converting top-level column to arrow format: col = $(typeof(x)), compression = $compression, kw = $(kw.data)"
-    @debug 3 x
-    A = arrowvector(x, i, 0, 0, de, ded, meta; compression=compression, kw...)
-    if compression isa LZ4FrameCompressor
-        A = compress(Meta.CompressionType.LZ4_FRAME, compression, A)
-    elseif compression isa ZstdCompressor
-        A = compress(Meta.CompressionType.ZSTD, compression, A)
-    end
-    @debug 2 "converted top-level column to arrow format: $(typeof(A))"
-    @debug 3 A
-    return A
-end
-
-function arrowvector(x, i, nl, fi, de, ded, meta; dictencoding::Bool=false, dictencode::Bool=false, kw...)
-    if !(x isa DictEncode) && !dictencoding && (dictencode || (x isa AbstractArray && DataAPI.refarray(x) !== x))
-        x = DictEncode(x, dictencodeid(i, nl, fi))
-    end
-    S = maybemissing(eltype(x))
-    return arrowvector(S, x, i, nl, fi, de, ded, meta; dictencode=dictencode, kw...)
-end
-
-# defaults for Dates types
-ArrowTypes.default(::Type{Dates.Date}) = Dates.Date(1,1,1)
-ArrowTypes.default(::Type{Dates.Time}) = Dates.Time(1,1,1)
-ArrowTypes.default(::Type{Dates.DateTime}) = Dates.DateTime(1,1,1,1,1,1)
-ArrowTypes.default(::Type{TimeZones.ZonedDateTime}) = TimeZones.ZonedDateTime(1,1,1,1,1,1,TimeZones.tz"UTC")
-
-# conversions to arrow types
-arrowvector(::Type{Dates.Date}, x, i, nl, fi, de, ded, meta; kw...) =
-    arrowvector(converter(DATE, x), i, nl, fi, de, ded, meta; kw...)
-arrowvector(::Type{Dates.Time}, x, i, nl, fi, de, ded, meta; kw...) =
-    arrowvector(converter(TIME, x), i, nl, fi, de, ded, meta; kw...)
-arrowvector(::Type{Dates.DateTime}, x, i, nl, fi, de, ded, meta; kw...) =
-    arrowvector(converter(DATETIME, x), i, nl, fi, de, ded, meta; kw...)
-arrowvector(::Type{ZonedDateTime}, x, i, nl, fi, de, ded, meta; kw...) =
-    arrowvector(converter(Timestamp{Meta.TimeUnit.MILLISECOND, Symbol(x[1].timezone)}, x), i, nl, fi, de, ded, meta; kw...)
-arrowvector(::Type{P}, x, i, nl, fi, de, ded, meta; kw...) where {P <: Dates.Period} =
-    arrowvector(converter(Duration{arrowperiodtype(P)}, x), i, nl, fi, de, ded, meta; kw...)
-
-# fallback that calls ArrowType
-function arrowvector(::Type{S}, x, i, nl, fi, de, ded, meta; kw...) where {S}
-    if ArrowTypes.istyperegistered(S)
-        meta = meta === nothing ? Dict{String, String}() : meta
-        arrowtype = ArrowTypes.getarrowtype!(meta, S)
-        if arrowtype === S
-            return arrowvector(ArrowType(S), x, i, nl, fi, de, ded, meta; kw...)
-        else
-            return arrowvector(converter(arrowtype, x), i, nl, fi, de, ded, meta; kw...)
-        end
-    end
-    return arrowvector(ArrowType(S), x, i, nl, fi, de, ded, meta; kw...)
-end
-
-arrowvector(::NullType, x, i, nl, fi, de, ded, meta; kw...) = MissingVector(length(x))
-compress(Z::Meta.CompressionType, comp, v::MissingVector) =
-    Compressed{Z, MissingVector}(v, CompressedBuffer[], length(v), length(v), Compressed[])
-
-function makenodesbuffers!(col::MissingVector, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    push!(fieldnodes, FieldNode(length(col), length(col)))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    return bufferoffset
-end
-
-function writebuffer(io, col::MissingVector, alignment)
-    return
-end
-
-"""
-    Arrow.ValidityBitmap
-
-A bit-packed array type where each bit corresponds to an element in an
-[`ArrowVector`](@ref), indicating whether that element is "valid" (bit == 1),
-or not (bit == 0). Used to indicate element missingness (whether it's null).
-
-If the null count of an array is zero, the `ValidityBitmap` will be "empty"
-and all elements are treated as "valid"/non-null.
-"""
-struct ValidityBitmap <: ArrowVector{Bool}
-    bytes::Vector{UInt8} # arrow memory blob
-    pos::Int # starting byte of validity bitmap
-    ℓ::Int # number of _elements_ (not bytes!) in the bitmap (because of bitpacking)
-    nc::Int # null count
-end
-
-Base.size(p::ValidityBitmap) = (p.ℓ,)
-nullcount(x::ValidityBitmap) = x.nc
-
-function ValidityBitmap(x)
-    T = eltype(x)
-    if !(T >: Missing)
-        return ValidityBitmap(UInt8[], 1, length(x), 0)
-    end
-    len = length(x)
-    blen = cld(len, 8)
-    bytes = Vector{UInt8}(undef, blen)
-    st = iterate(x)
-    nc = 0
-    b = 0xff
-    j = k = 1
-    for y in x
-        if y === missing
-            nc += 1
-            b = setbit(b, false, j)
-        end
-        j += 1
-        if j == 9
-            @inbounds bytes[k] = b
-            b = 0xff
-            j = 1
-            k += 1
-        end
-    end
-    if j > 1
-        bytes[k] = b
-    end
-    return ValidityBitmap(nc == 0 ? UInt8[] : bytes, 1, nc == 0 ? 0 : len, nc)
-end
-
-@propagate_inbounds function Base.getindex(p::ValidityBitmap, i::Integer)
-    # no boundscheck because parent array should do it
-    # if a validity bitmap is empty, it either means:
-    #   1) the parent array null_count is 0, so all elements are valid
-    #   2) parent array is also empty, so "all" elements are valid
-    p.nc == 0 && return true
-    # translate element index to bitpacked byte index
-    a, b = fldmod1(i, 8)
-    @inbounds byte = p.bytes[p.pos + a - 1]
-    # check individual bit of byte
-    return getbit(byte, b)
-end
-
-@propagate_inbounds function Base.setindex!(p::ValidityBitmap, v, i::Integer)
-    x = convert(Bool, v)
-    p.ℓ == 0 && !x && throw(BoundsError(p, i))
-    a, b = fldmod1(i, 8)
-    @inbounds byte = p.bytes[p.pos + a - 1]
-    @inbounds p.bytes[p.pos + a - 1] = setbit(byte, x, b)
-    return v
-end
-
-function writebitmap(io, col::ArrowVector, alignment)
-    v = col.validity
-    @debug 1 "writing validity bitmap: nc = $(v.nc), n = $(cld(v.ℓ, 8))"
-    v.nc == 0 && return 0
-    n = Base.write(io, view(v.bytes, v.pos:(v.pos + cld(v.ℓ, 8) - 1)))
-    return n + writezeros(io, paddinglength(n, alignment))
-end
-
-include("compressed.jl")
-include("primitive.jl")
-include("bool.jl")
-include("list.jl")
-include("fixedsizelist.jl")
-include("map.jl")
-include("struct.jl")
-include("unions.jl")
-include("dictencoding.jl")
diff --git a/julia/Arrow/src/arraytypes/bool.jl b/julia/Arrow/src/arraytypes/bool.jl
deleted file mode 100644
index 07e0416..0000000
--- a/julia/Arrow/src/arraytypes/bool.jl
+++ /dev/null
@@ -1,111 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.BoolVector
-
-A bit-packed array type, similar to [`ValidityBitmap`](@ref), but which
-holds boolean values, `true` or `false`.
-"""
-struct BoolVector{T} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    pos::Int
-    validity::ValidityBitmap
-    ℓ::Int64
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(p::BoolVector) = (p.ℓ,)
-
-@propagate_inbounds function Base.getindex(p::BoolVector{T}, i::Integer) where {T}
-    @boundscheck checkbounds(p, i)
-    if T >: Missing
-        @inbounds !p.validity[i] && return missing
-    end
-    a, b = fldmod1(i, 8)
-    @inbounds byte = p.arrow[p.pos + a - 1]
-    # check individual bit of byte
-    return getbit(byte, b)
-end
-
-@propagate_inbounds function Base.setindex!(p::BoolVector, v, i::Integer)
-    @boundscheck checkbounds(p, i)
-    x = convert(Bool, v)
-    a, b = fldmod1(i, 8)
-    @inbounds byte = p.arrow[p.pos + a - 1]
-    @inbounds p.arrow[p.pos + a - 1] = setbit(byte, x, b)
-    return v
-end
-
-arrowvector(::BoolType, x::BoolVector, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::BoolType, x, i, nl, fi, de, ded, meta; kw...)
-    validity = ValidityBitmap(x)
-    len = length(x)
-    blen = cld(len, 8)
-    bytes = Vector{UInt8}(undef, blen)
-    b = 0xff
-    j = k = 1
-    for y in x
-        if y === false
-            b = setbit(b, false, j)
-        end
-        j += 1
-        if j == 9
-            @inbounds bytes[k] = b
-            b = 0xff
-            j = 1
-            k += 1
-        end
-    end
-    if j > 1
-        bytes[k] = b
-    end
-    return BoolVector{eltype(x)}(bytes, 1, validity, len, meta)
-end
-
-function compress(Z::Meta.CompressionType, comp, p::P) where {P <: BoolVector}
-    len = length(p)
-    nc = nullcount(p)
-    validity = compress(Z, comp, p.validity)
-    data = compress(Z, comp, view(p.arrow, p.pos:(p.pos + cld(p.ℓ, 8) - 1)))
-    return Compressed{Z, P}(p, [validity, data], len, nc, Compressed[])
-end
-
-function makenodesbuffers!(col::BoolVector, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # validity bitmap
-    blen = nc == 0 ? 0 : bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    # adjust buffer offset, make primitive array buffer
-    bufferoffset += blen
-    blen = bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    return bufferoffset + blen
-end
-
-function writebuffer(io, col::BoolVector, alignment)
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    writebitmap(io, col, alignment)
-    n = Base.write(io, view(col.arrow, col.pos:(col.pos + cld(col.ℓ, 8) - 1)))
-    return n + writezeros(io, paddinglength(n, alignment))
-end
diff --git a/julia/Arrow/src/arraytypes/compressed.jl b/julia/Arrow/src/arraytypes/compressed.jl
deleted file mode 100644
index 5f8e67e..0000000
--- a/julia/Arrow/src/arraytypes/compressed.jl
+++ /dev/null
@@ -1,90 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-struct CompressedBuffer
-    data::Vector{UInt8}
-    uncompressedlength::Int64
-end
-
-"""
-    Arrow.Compressed
-
-Represents the compressed version of an [`ArrowVector`](@ref).
-Holds a reference to the original column. May have `Compressed`
-children for nested array types.
-"""
-struct Compressed{Z, A}
-    data::A
-    buffers::Vector{CompressedBuffer}
-    len::Int64
-    nullcount::Int64
-    children::Vector{Compressed}
-end
-
-Base.length(c::Compressed) = c.len
-Base.eltype(c::Compressed{Z, A}) where {Z, A} = eltype(A)
-getmetadata(x::Compressed) = getmetadata(x.data)
-compressiontype(c::Compressed{Z}) where {Z} = Z
-
-function compress(Z::Meta.CompressionType, comp, x::Array)
-    GC.@preserve x begin
-        y = unsafe_wrap(Array, convert(Ptr{UInt8}, pointer(x)), sizeof(x))
-        return CompressedBuffer(transcode(comp, y), length(y))
-    end
-end
-
-compress(Z::Meta.CompressionType, comp, x) = compress(Z, comp, convert(Array, x))
-
-compress(Z::Meta.CompressionType, comp, v::ValidityBitmap) =
-    v.nc == 0 ? CompressedBuffer(UInt8[], 0) : compress(Z, comp, view(v.bytes, v.pos:(v.pos + cld(v.ℓ, 8) - 1)))
-
-function makenodesbuffers!(col::Compressed, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    push!(fieldnodes, FieldNode(col.len, col.nullcount))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    for buffer in col.buffers
-        blen = length(buffer.data) == 0 ? 0 : 8 + length(buffer.data)
-        push!(fieldbuffers, Buffer(bufferoffset, blen))
-        @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-        bufferoffset += padding(blen, alignment)
-    end
-    for child in col.children
-        bufferoffset = makenodesbuffers!(child, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    end
-    return bufferoffset
-end
-
-function writearray(io, b::CompressedBuffer)
-    if length(b.data) > 0
-        n = Base.write(io, b.uncompressedlength)
-        @debug 1 "writing compressed buffer: uncompressedlength = $(b.uncompressedlength), n = $(length(b.data))"
-        @debug 2 b.data
-        return n + Base.write(io, b.data)
-    end
-    return 0
-end
-
-function writebuffer(io, col::Compressed, alignment)
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    for buffer in col.buffers
-        n = writearray(io, buffer)
-        writezeros(io, paddinglength(n, alignment))
-    end
-    for child in col.children
-        writebuffer(io, child, alignment)
-    end
-    return
-end
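
A sketch of how these compression paths were reached from the public API
(the `compress` keyword and accepted symbols match the Arrow.jl docs of this
era, but treat them as an assumption):

    using Arrow
    tbl = (x = collect(1:10_000), y = rand(10_000))
    Arrow.write("data.lz4.arrow", tbl; compress=:lz4)    # LZ4-frame buffers
    Arrow.write("data.zstd.arrow", tbl; compress=:zstd)  # Zstd buffers
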
diff --git a/julia/Arrow/src/arraytypes/dictencoding.jl b/julia/Arrow/src/arraytypes/dictencoding.jl
deleted file mode 100644
index eca7c4d..0000000
--- a/julia/Arrow/src/arraytypes/dictencoding.jl
+++ /dev/null
@@ -1,248 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.DictEncoding
-
-Represents the "pool" of possible values for a [`DictEncoded`](@ref)
-array type. Whether the order of values is significant can be checked
-by looking at the `isOrdered` boolean field.
-"""
-mutable struct DictEncoding{T, A} <: ArrowVector{T}
-    id::Int64
-    data::A
-    isOrdered::Bool
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(d::DictEncoding) = size(d.data)
-
-@propagate_inbounds function Base.getindex(d::DictEncoding{T}, i::Integer) where {T}
-    @boundscheck checkbounds(d, i)
-    return @inbounds ArrowTypes.arrowconvert(T, d.data[i])
-end
-
-# convenience wrapper to signal that an input column should be
-# dict encoded when written to the arrow format
-struct DictEncodeType{T} end
-getT(::Type{DictEncodeType{T}}) where {T} = T
-
-"""
-    Arrow.DictEncode(::AbstractVector, id::Integer=-1)
-
-Signals that a column/array should be dictionary encoded when serialized
-to the arrow streaming/file format. An optional `id` number may be provided
-to signal that multiple columns should use the same pool when being
-dictionary encoded.
-"""
-struct DictEncode{T, A} <: AbstractVector{DictEncodeType{T}}
-    id::Int64
-    data::A
-end
-
-DictEncode(x::A, id=-1) where {A} = DictEncode{eltype(A), A}(id, x)
-Base.IndexStyle(::Type{<:DictEncode}) = Base.IndexLinear()
-Base.size(x::DictEncode) = (length(x.data),)
-Base.iterate(x::DictEncode, st...) = iterate(x.data, st...)
-Base.getindex(x::DictEncode, i::Int) = getindex(x.data, i)
-ArrowTypes.ArrowType(::Type{<:DictEncodeType}) = DictEncodedType()
-
-"""
-    Arrow.DictEncoded
-
-A dictionary encoded array type (similar to a `PooledArray`). Behaves just
-like a normal array in most respects; internally, possible values are stored
-in the `encoding::DictEncoding` field, while the `indices::Vector{<:Integer}`
-field holds the "codes" of each element for indexing into the encoding pool.
-Any column/array can be dict encoded when serializing to the arrow format,
-either by passing the `dictencode=true` keyword argument to [`Arrow.write`](@ref)
-(which causes _all_ columns to be dict encoded), or by wrapping individual
-columns/arrays in [`Arrow.DictEncode(x)`](@ref).
-"""
-struct DictEncoded{T, S, A} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    validity::ValidityBitmap
-    indices::Vector{S}
-    encoding::DictEncoding{T, A}
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-DictEncoded(b::Vector{UInt8}, v::ValidityBitmap, inds::Vector{S}, encoding::DictEncoding{T, A}, meta) where {S, T, A} =
-    DictEncoded{T, S, A}(b, v, inds, encoding, meta)
-
-Base.size(d::DictEncoded) = size(d.indices)
-
-isdictencoded(d::DictEncoded) = true
-isdictencoded(x) = false
-isdictencoded(c::Compressed{Z, A}) where {Z, A <: DictEncoded} = true
-
-signedtype(::Type{UInt8}) = Int8
-signedtype(::Type{UInt16}) = Int16
-signedtype(::Type{UInt32}) = Int32
-signedtype(::Type{UInt64}) = Int64
-
-indtype(d::DictEncoded{T, S, A}) where {T, S, A} = S
-indtype(c::Compressed{Z, A}) where {Z, A <: DictEncoded} = indtype(c.data)
-
-dictencodeid(colidx, nestedlevel, fieldid) = (Int64(nestedlevel) << 48) | (Int64(fieldid) << 32) | Int64(colidx)
-
-getid(d::DictEncoded) = d.encoding.id
-getid(c::Compressed{Z, A}) where {Z, A <: DictEncoded} = c.data.encoding.id
-
-arrowvector(::DictEncodedType, x::DictEncoded, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::DictEncodedType, x, i, nl, fi, de, ded, meta; dictencode::Bool=false, dictencodenested::Bool=false, kw...)
-    @assert x isa DictEncode
-    id = x.id == -1 ? dictencodeid(i, nl, fi) : x.id
-    x = x.data
-    len = length(x)
-    validity = ValidityBitmap(x)
-    if !haskey(de, id)
-        # dict encoding doesn't exist yet, so create for 1st time
-        if DataAPI.refarray(x) === x
-            # need to encode ourselves
-            x = PooledArray(x, encodingtype(length(x)))
-            inds = DataAPI.refarray(x)
-        else
-            inds = copy(DataAPI.refarray(x))
-        end
-        # adjust to "offset" instead of index
-        for i = 1:length(inds)
-            @inbounds inds[i] -= 1
-        end
-        pool = DataAPI.refpool(x)
-        # horrible hack? yes. better than taking a CategoricalArrays dependency? also yes.
-        if typeof(pool).name.name == :CategoricalRefPool
-            pool = [get(pool[i]) for i = 1:length(pool)]
-        end
-        data = arrowvector(pool, i, nl, fi, de, ded, nothing; dictencode=dictencodenested, dictencodenested=dictencodenested, dictencoding=true, kw...)
-        encoding = DictEncoding{eltype(data), typeof(data)}(id, data, false, getmetadata(data))
-        de[id] = Lockable(encoding)
-    else
-        # encoding already exists:
-        #   compute inds based on it
-        #   if a value doesn't exist in the encoding, push! it
-        #   and add it to the deltas updates
-        encodinglockable = de[id]
-        @lock encodinglockable begin
-            encoding = encodinglockable.x
-            len = length(x)
-            ET = encodingtype(len)
-            pool = Dict{Union{eltype(encoding), eltype(x)}, ET}(a => (b - 1) for (b, a) in enumerate(encoding))
-            deltas = eltype(x)[]
-            inds = Vector{ET}(undef, len)
-            categorical = typeof(x).name.name == :CategoricalArray
-            for (j, val) in enumerate(x)
-                if categorical
-                    val = get(val)
-                end
-                @inbounds inds[j] = get!(pool, val) do
-                    push!(deltas, val)
-                    length(pool)
-                end
-            end
-            if !isempty(deltas)
-                data = arrowvector(deltas, i, nl, fi, de, ded, nothing; dictencode=dictencodenested, dictencodenested=dictencodenested, dictencoding=true, kw...)
-                push!(ded, DictEncoding{eltype(data), typeof(data)}(id, data, false, getmetadata(data)))
-                if typeof(encoding.data) <: ChainedVector
-                    append!(encoding.data, data)
-                else
-                    data2 = ChainedVector([encoding.data, data])
-                    encoding = DictEncoding{eltype(data2), typeof(data2)}(id, data2, false, getmetadata(encoding))
-                    de[id] = Lockable(encoding)
-                end
-            end
-        end
-    end
-    if meta !== nothing && getmetadata(encoding) !== nothing
-        merge!(meta, getmetadata(encoding))
-    elseif getmetadata(encoding) !== nothing
-        meta = getmetadata(encoding)
-    end
-    return DictEncoded(UInt8[], validity, inds, encoding, meta)
-end
-
-@propagate_inbounds function Base.getindex(d::DictEncoded, i::Integer)
-    @boundscheck checkbounds(d, i)
-    @inbounds valid = d.validity[i]
-    !valid && return missing
-    @inbounds idx = d.indices[i]
-    return @inbounds d.encoding[idx + 1]
-end
-
-@propagate_inbounds function Base.setindex!(d::DictEncoded{T}, v, i::Integer) where {T}
-    @boundscheck checkbounds(d, i)
-    if v === missing
-        @inbounds d.validity[i] = false
-    else
-        ix = findfirst(d.encoding.data, v)
-        if ix === nothing
-            push!(d.encoding.data, v)
-            @inbounds d.indices[i] = length(d.encoding.data) - 1
-        else
-            @inbounds d.indices[i] = ix - 1
-        end
-    end
-    return v
-end
-
-function Base.copy(x::DictEncoded{T, S}) where {T, S}
-    pool = copy(x.encoding.data)
-    valid = x.validity
-    inds = x.indices
-    refs = copy(inds)
-    @inbounds for i = 1:length(inds)
-        refs[i] = refs[i] + one(S)
-    end
-    return PooledArray(PooledArrays.RefArray(refs), Dict{T, S}(val => i for (i, val) in enumerate(pool)), pool)
-end
-
-function compress(Z::Meta.CompressionType, comp, x::A) where {A <: DictEncoded}
-    len = length(x)
-    nc = nullcount(x)
-    validity = compress(Z, comp, x.validity)
-    inds = compress(Z, comp, x.indices)
-    return Compressed{Z, A}(x, [validity, inds], len, nc, Compressed[])
-end
-
-function makenodesbuffers!(col::DictEncoded{T, S}, fieldnodes, fieldbuffers, bufferoffset, alignment) where {T, S}
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # validity bitmap
-    blen = nc == 0 ? 0 : bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    bufferoffset += blen
-    # indices
-    blen = sizeof(S) * len
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    bufferoffset += padding(blen, alignment)
-    return bufferoffset
-end
-
-function writebuffer(io, col::DictEncoded, alignment)
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    writebitmap(io, col, alignment)
-    # write indices
-    n = writearray(io, col.indices)
-    @debug 1 "writing array: col = $(typeof(col.indices)), n = $n, padded = $(padding(n, alignment))"
-    writezeros(io, paddinglength(n, alignment))
-    return
-end
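
A usage sketch for the dictionary-encoding path above (column names are
hypothetical): wrap an individual column in `Arrow.DictEncode`, or pass
`dictencode=true` to encode every column:

    using Arrow
    tbl = (id = repeat(["a", "b", "c"], 1_000), val = rand(3_000))
    Arrow.write("dict.arrow", (id = Arrow.DictEncode(tbl.id), val = tbl.val))
    Arrow.write("dict_all.arrow", tbl; dictencode=true)  # every column
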
diff --git a/julia/Arrow/src/arraytypes/fixedsizelist.jl b/julia/Arrow/src/arraytypes/fixedsizelist.jl
deleted file mode 100644
index 9393049..0000000
--- a/julia/Arrow/src/arraytypes/fixedsizelist.jl
+++ /dev/null
@@ -1,153 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.FixedSizeList
-
-An `ArrowVector` where each element is a "fixed size" list of some kind, like a `NTuple{N, T}`.
-"""
-struct FixedSizeList{T, A <: AbstractVector} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    validity::ValidityBitmap
-    data::A
-    ℓ::Int
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(l::FixedSizeList) = (l.ℓ,)
-
-@propagate_inbounds function Base.getindex(l::FixedSizeList{T}, i::Integer) where {T}
-    @boundscheck checkbounds(l, i)
-    N = ArrowTypes.getsize(Base.nonmissingtype(T))
-    off = (i - 1) * N
-    if Base.nonmissingtype(T) !== T
-        return l.validity[i] ? ArrowTypes.arrowconvert(T, ntuple(j->l.data[off + j], N)) : missing
-    else
-        return ArrowTypes.arrowconvert(T, ntuple(j->l.data[off + j], N))
-    end
-end
-
-@propagate_inbounds function Base.setindex!(l::FixedSizeList{T}, v::T, i::Integer) where {T}
-    @boundscheck checkbounds(l, i)
-    if v === missing
-        @inbounds l.validity[i] = false
-    else
-        N = ArrowTypes.getsize(Base.nonmissingtype(T))
-        off = (i - 1) * N
-        foreach(1:N) do j
-            @inbounds l.data[off + j] = v[j]
-        end
-    end
-    return v
-end
-
-# lazy equal-spaced flattener
-struct ToFixedSizeList{T, N, A} <: AbstractVector{T}
-    data::A # A is AbstractVector of AbstractVector or AbstractString
-end
-
-function ToFixedSizeList(input)
-    NT = Base.nonmissingtype(eltype(input)) # typically NTuple{N, T}
-    return ToFixedSizeList{ArrowTypes.gettype(NT), ArrowTypes.getsize(NT), typeof(input)}(input)
-end
-
-Base.IndexStyle(::Type{<:ToFixedSizeList}) = Base.IndexLinear()
-Base.size(x::ToFixedSizeList{T, N}) where {T, N} = (N * length(x.data),)
-
-Base.@propagate_inbounds function Base.getindex(A::ToFixedSizeList{T, N}, i::Integer) where {T, N}
-    @boundscheck checkbounds(A, i)
-    a, b = fldmod1(i, N)
-    @inbounds x = A.data[a]
-    return @inbounds x === missing ? ArrowTypes.default(T) : x[b]
-end
-
-# efficient iteration
-@inline function Base.iterate(A::ToFixedSizeList{T, N}, (i, chunk, chunk_i, len)=(1, 1, 1, length(A))) where {T, N}
-    i > len && return nothing
-    @inbounds y = A.data[chunk]
-    @inbounds x = y === missing ? ArrowTypes.default(T) : y[chunk_i]
-    if chunk_i == N
-        chunk += 1
-        chunk_i = 1
-    else
-        chunk_i += 1
-    end
-    return x, (i + 1, chunk, chunk_i, len)
-end
-
-arrowvector(::FixedSizeListType, x::FixedSizeList, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::FixedSizeListType, x, i, nl, fi, de, ded, meta; kw...)
-    len = length(x)
-    validity = ValidityBitmap(x)
-    flat = ToFixedSizeList(x)
-    if eltype(flat) == UInt8
-        data = flat
-    else
-        data = arrowvector(flat, i, nl + 1, fi, de, ded, nothing; kw...)
-    end
-    return FixedSizeList{eltype(x), typeof(data)}(UInt8[], validity, data, len, meta)
-end
-
-function compress(Z::Meta.CompressionType, comp, x::FixedSizeList{T, A}) where {T, A}
-    len = length(x)
-    nc = nullcount(x)
-    validity = compress(Z, comp, x.validity)
-    buffers = [validity]
-    children = Compressed[]
-    if eltype(A) == UInt8
-        push!(buffers, compress(Z, comp, x.data))
-    else
-        push!(children, compress(Z, comp, x.data))
-    end
-    return Compressed{Z, typeof(x)}(x, buffers, len, nc, children)
-end
-
-function makenodesbuffers!(col::FixedSizeList{T, A}, fieldnodes, fieldbuffers, bufferoffset, alignment) where {T, A}
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # validity bitmap
-    blen = nc == 0 ? 0 : bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    bufferoffset += blen
-    if eltype(A) === UInt8
-        blen = ArrowTypes.getsize(Base.nonmissingtype(T)) * len
-        push!(fieldbuffers, Buffer(bufferoffset, blen))
-        @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-        bufferoffset += padding(blen, alignment)
-    else
-        bufferoffset = makenodesbuffers!(col.data, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    end
-    return bufferoffset
-end
-
-function writebuffer(io, col::FixedSizeList{T, A}, alignment) where {T, A}
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    writebitmap(io, col, alignment)
-    # write values array
-    if eltype(A) === UInt8
-        n = writearray(io, UInt8, col.data)
-        @debug 1 "writing array: col = $(typeof(col.data)), n = $n, padded = $(padding(n, alignment))"
-        writezeros(io, paddinglength(n, alignment))
-    else
-        writebuffer(io, col.data, alignment)
-    end
-    return
-end
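
The removed ToFixedSizeList above flattens a column of equal-length tuples into
one long vector, using fldmod1 to map a flat index back to (element, position).
A minimal standalone sketch of that indexing scheme (illustrative only, not the
removed API itself):

    # three NTuple{2, Int} elements flatten to one vector of length 6
    vals = [(1, 2), (3, 4), (5, 6)]
    N = 2
    flat = [vals[fld1(i, N)][mod1(i, N)] for i in 1:N*length(vals)]
    @assert flat == [1, 2, 3, 4, 5, 6]
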
diff --git a/julia/Arrow/src/arraytypes/list.jl b/julia/Arrow/src/arraytypes/list.jl
deleted file mode 100644
index c975bb3..0000000
--- a/julia/Arrow/src/arraytypes/list.jl
+++ /dev/null
@@ -1,209 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-struct Offsets{T <: Union{Int32, Int64}} <: ArrowVector{Tuple{T, T}}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    offsets::Vector{T}
-end
-
-Base.size(o::Offsets) = (length(o.offsets) - 1,)
-
-@propagate_inbounds function Base.getindex(o::Offsets, i::Integer)
-    @boundscheck checkbounds(o, i)
-    @inbounds lo = o.offsets[i] + 1
-    @inbounds hi = o.offsets[i + 1]
-    return lo, hi
-end
-
-"""
-    Arrow.List
-
-An `ArrowVector` where each element is a variable-sized list of some kind, like an `AbstractVector` or `AbstractString`.
-"""
-struct List{T, O, A} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    validity::ValidityBitmap
-    offsets::Offsets{O}
-    data::A
-    ℓ::Int
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(l::List) = (l.ℓ,)
-
-@propagate_inbounds function Base.getindex(l::List{T}, i::Integer) where {T}
-    @boundscheck checkbounds(l, i)
-    @inbounds lo, hi = l.offsets[i]
-    if ArrowTypes.isstringtype(T)
-        if Base.nonmissingtype(T) !== T
-            return l.validity[i] ? ArrowTypes.arrowconvert(T, unsafe_string(pointer(l.data, lo), hi - lo + 1)) : missing
-        else
-            return ArrowTypes.arrowconvert(T, unsafe_string(pointer(l.data, lo), hi - lo + 1))
-        end
-    elseif Base.nonmissingtype(T) !== T
-        return l.validity[i] ? ArrowTypes.arrowconvert(T, view(l.data, lo:hi)) : missing
-    else
-        return ArrowTypes.arrowconvert(T, view(l.data, lo:hi))
-    end
-end
-
-# @propagate_inbounds function Base.setindex!(l::List{T}, v, i::Integer) where {T}
-
-# end
-
-# an AbstractVector version of Iterators.flatten
-# code based on SentinelArrays.ChainedVector
-struct ToList{T, stringtype, A, I} <: AbstractVector{T}
-    data::Vector{A} # A is AbstractVector or AbstractString
-    inds::Vector{I}
-end
-
-function ToList(input; largelists::Bool=false)
-    AT = eltype(input)
-    ST = Base.nonmissingtype(AT)
-    stringtype = ArrowTypes.isstringtype(ST)
-    T = stringtype ? UInt8 : eltype(ST)
-    len = stringtype ? ncodeunits : length
-    data = AT[]
-    I = largelists ? Int64 : Int32
-    inds = I[0]
-    sizehint!(data, length(input))
-    sizehint!(inds, length(input))
-    totalsize = I(0)
-    for x in input
-        if x === missing
-            push!(data, missing)
-        else
-            push!(data, x)
-            totalsize += len(x)
-            if I === Int32 && totalsize > typemax(Int32)
-                I = Int64
-                inds = convert(Vector{Int64}, inds)
-            end
-        end
-        push!(inds, totalsize)
-    end
-    return ToList{T, stringtype, AT, I}(data, inds)
-end
-
-Base.IndexStyle(::Type{<:ToList}) = Base.IndexLinear()
-Base.size(x::ToList) = (length(x.inds) == 0 ? 0 : x.inds[end],)
-
-function Base.pointer(A::ToList{UInt8}, i::Integer)
-    chunk = searchsortedfirst(A.inds, i)
-    return pointer(A.data[chunk - 1])
-end
-
-@inline function index(A::ToList, i::Integer)
-    chunk = searchsortedfirst(A.inds, i)
-    return chunk - 1, i - (@inbounds A.inds[chunk - 1])
-end
-
-Base.@propagate_inbounds function Base.getindex(A::ToList{T, stringtype}, i::Integer) where {T, stringtype}
-    @boundscheck checkbounds(A, i)
-    chunk, ix = index(A, i)
-    @inbounds x = A.data[chunk]
-    return @inbounds stringtype ? codeunits(x)[ix] : x[ix]
-end
-
-Base.@propagate_inbounds function Base.setindex!(A::ToList{T, stringtype}, v, i::Integer) where {T, stringtype}
-    @boundscheck checkbounds(A, i)
-    chunk, ix = index(A, i)
-    @inbounds x = A.data[chunk]
-    if stringtype
-        codeunits(x)[ix] = v
-    else
-        x[ix] = v
-    end
-    return v
-end
-
-# efficient iteration
-@inline function Base.iterate(A::ToList{T, stringtype}) where {T, stringtype}
-    length(A) == 0 && return nothing
-    i = 1
-    chunk = 2
-    chunk_i = 1
-    chunk_len = A.inds[chunk]
-    while i > chunk_len
-        chunk += 1
-        chunk_len = A.inds[chunk]
-    end
-    val = A.data[chunk - 1]
-    x = stringtype ? codeunits(val)[1] : val[1]
-    # find next valid index
-    i += 1
-    if i > chunk_len
-        while true
-            chunk += 1
-            chunk > length(A.inds) && break
-            chunk_len = A.inds[chunk]
-            i <= chunk_len && break
-        end
-    else
-        chunk_i += 1
-    end
-    return x, (i, chunk, chunk_i, chunk_len, length(A))
-end
-
-@inline function Base.iterate(A::ToList{T, stringtype}, (i, chunk, chunk_i, chunk_len, len)) where {T, stringtype}
-    i > len && return nothing
-    @inbounds val = A.data[chunk - 1]
-    @inbounds x = stringtype ? codeunits(val)[chunk_i] : val[chunk_i]
-    i += 1
-    if i > chunk_len
-        chunk_i = 1
-        while true
-            chunk += 1
-            chunk > length(A.inds) && break
-            @inbounds chunk_len = A.inds[chunk]
-            i <= chunk_len && break
-        end
-    else
-        chunk_i += 1
-    end
-    return x, (i, chunk, chunk_i, chunk_len, len)
-end
-
-arrowvector(::ListType, x::List, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::ListType, x, i, nl, fi, de, ded, meta; largelists::Bool=false, kw...)
-    len = length(x)
-    validity = ValidityBitmap(x)
-    flat = ToList(x; largelists=largelists)
-    offsets = Offsets(UInt8[], flat.inds)
-    if eltype(flat) == UInt8 # binary or utf8string
-        data = flat
-    else
-        data = arrowvector(flat, i, nl + 1, fi, de, ded, nothing; largelists=largelists, kw...)
-    end
-    return List{eltype(x), eltype(flat.inds), typeof(data)}(UInt8[], validity, offsets, data, len, meta)
-end
-
-function compress(Z::Meta.CompressionType, comp, x::List{T, O, A}) where {T, O, A}
-    len = length(x)
-    nc = nullcount(x)
-    validity = compress(Z, comp, x.validity)
-    offsets = compress(Z, comp, x.offsets.offsets)
-    buffers = [validity, offsets]
-    children = Compressed[]
-    if eltype(A) == UInt8
-        push!(buffers, compress(Z, comp, x.data))
-    else
-        push!(children, compress(Z, comp, x.data))
-    end
-    return Compressed{Z, typeof(x)}(x, buffers, len, nc, children)
-end
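
The removed List layout resolves element i through the offsets pair
(offsets[i] + 1, offsets[i + 1]), exactly as Offsets.getindex above computes.
A small self-contained sketch of that convention (names are illustrative):

    # Arrow-style offsets for the strings ["ab", "c", "def"]
    offsets = Int32[0, 2, 3, 6]
    data = Vector{UInt8}(codeunits("abcdef"))
    elem(i) = String(data[offsets[i]+1:offsets[i+1]])
    @assert elem(3) == "def"
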
diff --git a/julia/Arrow/src/arraytypes/map.jl b/julia/Arrow/src/arraytypes/map.jl
deleted file mode 100644
index 8565172..0000000
--- a/julia/Arrow/src/arraytypes/map.jl
+++ /dev/null
@@ -1,115 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.Map
-
-An `ArrowVector` where each element is a "map" of some kind, like a `Dict`.
-"""
-struct Map{T, O, A} <: ArrowVector{T}
-    validity::ValidityBitmap
-    offsets::Offsets{O}
-    data::A
-    ℓ::Int
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(l::Map) = (l.ℓ,)
-
-@propagate_inbounds function Base.getindex(l::Map{T}, i::Integer) where {T}
-    @boundscheck checkbounds(l, i)
-    @inbounds lo, hi = l.offsets[i]
-    if Base.nonmissingtype(T) !== T
-        return l.validity[i] ? ArrowTypes.arrowconvert(T, Dict(x.key => x.value for x in view(l.data, lo:hi))) : missing
-    else
-        return ArrowTypes.arrowconvert(T, Dict(x.key => x.value for x in view(l.data, lo:hi)))
-    end
-end
-
-keyvalues(KT, ::Missing) = missing
-keyvalues(KT, x::AbstractDict) = [KT(k, v) for (k, v) in pairs(x)]
-
-arrowvector(::MapType, x::Map, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::MapType, x, i, nl, fi, de, ded, meta; largelists::Bool=false, kw...)
-    len = length(x)
-    validity = ValidityBitmap(x)
-    ET = eltype(x)
-    DT = Base.nonmissingtype(ET)
-    KT = KeyValue{keytype(DT), valtype(DT)}
-    VT = Vector{KT}
-    T = DT !== ET ? Union{Missing, VT} : VT
-    flat = ToList(T[keyvalues(KT, y) for y in x]; largelists=largelists)
-    offsets = Offsets(UInt8[], flat.inds)
-    data = arrowvector(flat, i, nl + 1, fi, de, ded, nothing; largelists=largelists, kw...)
-    return Map{ET, eltype(flat.inds), typeof(data)}(validity, offsets, data, len, meta)
-end
-
-function compress(Z::Meta.CompressionType, comp, x::A) where {A <: Map}
-    len = length(x)
-    nc = nullcount(x)
-    validity = compress(Z, comp, x.validity)
-    offsets = compress(Z, comp, x.offsets.offsets)
-    buffers = [validity, offsets]
-    children = Compressed[]
-    push!(children, compress(Z, comp, x.data))
-    return Compressed{Z, A}(x, buffers, len, nc, children)
-end
-
-function makenodesbuffers!(col::Union{Map{T, O, A}, List{T, O, A}}, fieldnodes, fieldbuffers, bufferoffset, alignment) where {T, O, A}
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # validity bitmap
-    blen = nc == 0 ? 0 : bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    # adjust buffer offset, make array buffer
-    bufferoffset += blen
-    blen = sizeof(O) * (len + 1)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    bufferoffset += padding(blen, alignment)
-    if eltype(A) == UInt8
-        blen = length(col.data)
-        push!(fieldbuffers, Buffer(bufferoffset, blen))
-        @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-        bufferoffset += padding(blen, alignment)
-    else
-        bufferoffset = makenodesbuffers!(col.data, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    end
-    return bufferoffset
-end
-
-function writebuffer(io, col::Union{Map{T, O, A}, List{T, O, A}}, alignment) where {T, O, A}
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    writebitmap(io, col, alignment)
-    # write offsets
-    n = writearray(io, O, col.offsets.offsets)
-    @debug 1 "writing array: col = $(typeof(col.offsets.offsets)), n = $n, padded = $(padding(n, alignment))"
-    writezeros(io, paddinglength(n, alignment))
-    # write values array
-    if eltype(A) == UInt8
-        n = writearray(io, UInt8, col.data)
-        @debug 1 "writing array: col = $(typeof(col.data)), n = $n, padded = $(padding(n, alignment))"
-        writezeros(io, paddinglength(n, alignment))
-    else
-        writebuffer(io, col.data, alignment)
-    end
-    return
-end
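
As the removed map.jl shows, a Map column is effectively a List column whose
child is a (key, value) struct, materialized into a Dict on access. A rough
plain-Julia equivalent of that lookup (assumed shape, for illustration only):

    entries = [(key = "a", value = 1), (key = "b", value = 2), (key = "c", value = 3)]
    offsets = Int32[0, 2, 3]   # map 1 spans entries 1:2, map 2 spans entry 3
    getmap(i) = Dict(e.key => e.value for e in entries[offsets[i]+1:offsets[i+1]])
    @assert getmap(1) == Dict("a" => 1, "b" => 2)
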
diff --git a/julia/Arrow/src/arraytypes/primitive.jl b/julia/Arrow/src/arraytypes/primitive.jl
deleted file mode 100644
index b1796b9..0000000
--- a/julia/Arrow/src/arraytypes/primitive.jl
+++ /dev/null
@@ -1,106 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.Primitive
-
-An `ArrowVector` where each element is a "fixed size" scalar of some kind, like an integer, float, decimal, or time type.
-"""
-struct Primitive{T, A} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    validity::ValidityBitmap
-    data::A
-    ℓ::Int64
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Primitive(::Type{T}, b::Vector{UInt8}, v::ValidityBitmap, data::A, l, meta) where {T, A} =
-    Primitive{T, A}(b, v, data, l, meta)
-
-Base.size(p::Primitive) = (p.ℓ,)
-
-function Base.copy(p::Primitive{T, A}) where {T, A}
-    if nullcount(p) == 0 && T === eltype(A)
-        return copy(p.data)
-    else
-        return convert(Array, p)
-    end
-end
-
-@propagate_inbounds function Base.getindex(p::Primitive{T}, i::Integer) where {T}
-    @boundscheck checkbounds(p, i)
-    if T >: Missing
-        return @inbounds (p.validity[i] ? ArrowTypes.arrowconvert(T, p.data[i]) : missing)
-    else
-        return @inbounds ArrowTypes.arrowconvert(T, p.data[i])
-    end
-end
-
-@propagate_inbounds function Base.setindex!(p::Primitive{T}, v, i::Integer) where {T}
-    @boundscheck checkbounds(p, i)
-    if T >: Missing
-        if v === missing
-            @inbounds p.validity[i] = false
-        else
-            @inbounds p.data[i] = convert(Base.nonmissingtype(T), v)
-        end
-    else
-        @inbounds p.data[i] = convert(Base.nonmissingtype(T), v)
-    end
-    return v
-end
-
-arrowvector(::PrimitiveType, x::Primitive, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::PrimitiveType, x, i, nl, fi, de, ded, meta; kw...)
-    validity = ValidityBitmap(x)
-    return Primitive(eltype(x), UInt8[], validity, x, length(x), meta)
-end
-
-function compress(Z::Meta.CompressionType, comp, p::P) where {P <: Primitive}
-    len = length(p)
-    nc = nullcount(p)
-    validity = compress(Z, comp, p.validity)
-    data = compress(Z, comp, p.data)
-    return Compressed{Z, P}(p, [validity, data], len, nc, Compressed[])
-end
-
-function makenodesbuffers!(col::Primitive{T}, fieldnodes, fieldbuffers, bufferoffset, alignment) where {T}
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # validity bitmap
-    blen = nc == 0 ? 0 : bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    # adjust buffer offset, make primitive array buffer
-    bufferoffset += blen
-    blen = len * sizeof(Base.nonmissingtype(T))
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    return bufferoffset + padding(blen, alignment)
-end
-
-function writebuffer(io, col::Primitive{T}, alignment) where {T}
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    writebitmap(io, col, alignment)
-    n = writearray(io, Base.nonmissingtype(T), col.data)
-    @debug 1 "writing array: col = $(typeof(col.data)), n = $n, padded = $(padding(n, alignment))"
-    writezeros(io, paddinglength(n, alignment))
-    return
-end
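
The removed Primitive type pairs a values buffer with a validity bitmap: null
slots keep a placeholder value, and the bitmap decides whether getindex returns
missing. A toy version of that rule (illustrative only):

    data = [10, 0, 30]               # placeholder 0 at the null slot
    valid = Bool[true, false, true]  # stand-in for the validity bitmap
    getvalue(i) = valid[i] ? data[i] : missing
    @assert isequal(getvalue(2), missing) && getvalue(3) == 30
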
diff --git a/julia/Arrow/src/arraytypes/struct.jl b/julia/Arrow/src/arraytypes/struct.jl
deleted file mode 100644
index eaa9661..0000000
--- a/julia/Arrow/src/arraytypes/struct.jl
+++ /dev/null
@@ -1,130 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-    Arrow.Struct
-
-An `ArrowVector` where each element is a "struct" of some kind with ordered, named fields, like a `NamedTuple{names, types}` or regular julia `struct`.
-"""
-struct Struct{T, S} <: ArrowVector{T}
-    validity::ValidityBitmap
-    data::S # Tuple of ArrowVector
-    ℓ::Int
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(s::Struct) = (s.ℓ,)
-
-@propagate_inbounds function Base.getindex(s::Struct{T}, i::Integer) where {T}
-    @boundscheck checkbounds(s, i)
-    NT = Base.nonmissingtype(T)
-    if ArrowTypes.structtype(NT) === ArrowTypes.NAMEDTUPLE
-        if NT !== T
-            return s.validity[i] ? NT(ntuple(j->s.data[j][i], fieldcount(NT))) : missing
-        else
-            return NT(ntuple(j->s.data[j][i], fieldcount(NT)))
-        end
-    elseif ArrowTypes.structtype(NT) === ArrowTypes.STRUCT
-        if NT !== T
-            return s.validity[i] ? NT(ntuple(j->s.data[j][i], fieldcount(NT))...) : missing
-        else
-            return NT(ntuple(j->s.data[j][i], fieldcount(NT))...)
-        end
-    end
-end
-
-@propagate_inbounds function Base.setindex!(s::Struct{T}, v::T, i::Integer) where {T}
-    @boundscheck checkbounds(s, i)
-    if v === missing
-        @inbounds s.validity[i] = false
-    else
-        NT = Base.nonmissingtype(T)
-        N = fieldcount(NT)
-        foreach(1:N) do j
-            @inbounds s.data[j][i] = getfield(v, j)
-        end
-    end
-    return v
-end
-
-struct ToStruct{T, i, A} <: AbstractVector{T}
-    data::A # eltype is NamedTuple or some struct
-end
-
-ToStruct(x::A, j::Integer) where {A} = ToStruct{fieldtype(Base.nonmissingtype(eltype(A)), j), j, A}(x)
-
-Base.IndexStyle(::Type{<:ToStruct}) = Base.IndexLinear()
-Base.size(x::ToStruct) = (length(x.data),)
-
-Base.@propagate_inbounds function Base.getindex(A::ToStruct{T, j}, i::Integer) where {T, j}
-    @boundscheck checkbounds(A, i)
-    @inbounds x = A.data[i]
-    return x === missing ? ArrowTypes.default(T) : getfield(x, j)
-end
-
-arrowvector(::StructType, x::Struct, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::StructType, x, i, nl, fi, de, ded, meta; kw...)
-    len = length(x)
-    validity = ValidityBitmap(x)
-    T = Base.nonmissingtype(eltype(x))
-    if ArrowTypes.structtype(T) === ArrowTypes.STRUCT
-        meta = meta === nothing ? Dict{String, String}() : meta
-        ArrowTypes.registertype!(T, T)
-        ArrowTypes.getarrowtype!(meta, T)
-    end
-    data = Tuple(arrowvector(ToStruct(x, j), i, nl + 1, j, de, ded, nothing; kw...) for j = 1:fieldcount(T))
-    return Struct{eltype(x), typeof(data)}(validity, data, len, meta)
-end
-
-function compress(Z::Meta.CompressionType, comp, x::A) where {A <: Struct}
-    len = length(x)
-    nc = nullcount(x)
-    validity = compress(Z, comp, x.validity)
-    buffers = [validity]
-    children = Compressed[]
-    for y in x.data
-        push!(children, compress(Z, comp, y))
-    end
-    return Compressed{Z, A}(x, buffers, len, nc, children)
-end
-
-function makenodesbuffers!(col::Struct{T}, fieldnodes, fieldbuffers, bufferoffset, alignment) where {T}
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # validity bitmap
-    blen = nc == 0 ? 0 : bitpackedbytes(len, alignment)
-    push!(fieldbuffers, Buffer(bufferoffset, blen))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    bufferoffset += blen
-    for child in col.data
-        bufferoffset = makenodesbuffers!(child, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    end
-    return bufferoffset
-end
-
-function writebuffer(io, col::Struct, alignment)
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    writebitmap(io, col, alignment)
-    # write values arrays
-    for child in col.data
-        writebuffer(io, child, alignment)
-    end
-    return
-end
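
The removed ToStruct wrapper views field j of every row as its own column,
which is how struct children get serialized one vector per field. An eager
sketch of the same extraction (not the removed lazy type):

    rows = [(a = 1, b = "x"), (a = 2, b = "y")]
    col(j) = [getfield(r, j) for r in rows]
    @assert col(1) == [1, 2] && col(2) == ["x", "y"]
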
diff --git a/julia/Arrow/src/arraytypes/unions.jl b/julia/Arrow/src/arraytypes/unions.jl
deleted file mode 100644
index 64d8630..0000000
--- a/julia/Arrow/src/arraytypes/unions.jl
+++ /dev/null
@@ -1,279 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Union arrays
-# we need a custom representation of Union types since arrow unions
-# are ordered, and possibly indirected via a separate typeIds array;
-# here, T is Meta.UnionMode.Dense or Meta.UnionMode.Sparse,
-# typeIds is an NTuple{N, Int32}, and U is a Tuple{...} of the
-# unioned types
-struct UnionT{T, typeIds, U}
-end
-
-unionmode(::Type{UnionT{T, typeIds, U}}) where {T, typeIds, U} = T
-typeids(::Type{UnionT{T, typeIds, U}}) where {T, typeIds, U} = typeIds
-Base.eltype(::Type{UnionT{T, typeIds, U}}) where {T, typeIds, U} = U
-
-ArrowTypes.ArrowType(::Type{<:UnionT}) = ArrowTypes.UnionType()
-
-# iterate a Julia Union{...} type, producing an array of unioned types
-function eachunion(U::Union, elems=nothing)
-    if elems === nothing
-        return eachunion(U.b, Type[U.a])
-    else
-        push!(elems, U.a)
-        return eachunion(U.b, elems)
-    end
-end
-
-function eachunion(T, elems)
-    push!(elems, T)
-    return elems
-end
-
-# produce typeIds, offsets, data tuple for DenseUnion
-isatypeid(x::T, ::Type{types}) where {T, types} = isatypeid(x, fieldtype(types, 1), types, 1)
-isatypeid(x::T, ::Type{S}, ::Type{types}, i) where {T, S, types} = x isa S ? i : isatypeid(x, fieldtype(types, i + 1), types, i + 1)
-
-"""
-    Arrow.DenseUnion
-
-An `ArrowVector` where the type of each element is one of a fixed set of types, meaning its eltype is like a julia `Union{type1, type2, ...}`.
-An `Arrow.DenseUnion`, in comparison to `Arrow.SparseUnion`, stores elements in a set of arrays, one array per possible type, and an "offsets"
-array, where each offset element is the index into one of the typed arrays. This provides a kind of "compression": each element is
-stored exactly once, so no extra space is allocated.
-"""
-struct DenseUnion{T, S} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    arrow2::Vector{UInt8} # if arrow blob is compressed, need a 2nd reference for uncompressed offsets bytes
-    typeIds::Vector{UInt8}
-    offsets::Vector{Int32}
-    data::S # Tuple of ArrowVector
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(s::DenseUnion) = size(s.typeIds)
-nullcount(x::DenseUnion) = 0 # DenseUnion has no validity bitmap; only children do
-
-@propagate_inbounds function Base.getindex(s::DenseUnion{T}, i::Integer) where {T}
-    @boundscheck checkbounds(s, i)
-    @inbounds typeId = s.typeIds[i]
-    @inbounds off = s.offsets[i]
-    @inbounds x = s.data[typeId + 1][off + 1]
-    return x
-end
-
-@propagate_inbounds function Base.setindex!(s::DenseUnion{UnionT{T, typeIds, U}}, v, i::Integer) where {T, typeIds, U}
-    @boundscheck checkbounds(s, i)
-    @inbounds typeId = s.typeIds[i]
-    typeids = typeIds === nothing ? (0:(fieldcount(U) - 1)) : typeIds
-    vtypeId = Int8(typeids[isatypeid(v, U)])
-    if typeId == vtypeId
-        @inbounds off = s.offsets[i]
-        @inbounds s.data[typeId + 1][off + 1] = v
-    else
-        throw(ArgumentError("type of item to set $(typeof(v)) must match type of existing item $(fieldtype(U, typeId + 1))"))
-    end
-    return v
-end
-
-# convenience wrappers for signaling that an array should be written
-# with dense/sparse union arrow buffers
-struct DenseUnionVector{T, U} <: AbstractVector{UnionT{Meta.UnionMode.Dense, nothing, U}}
-    itr::T
-end
-
-DenseUnionVector(x::T) where {T} = DenseUnionVector{T, Tuple{eachunion(eltype(x))...}}(x)
-Base.IndexStyle(::Type{<:DenseUnionVector}) = Base.IndexLinear()
-Base.size(x::DenseUnionVector) = (length(x.itr),)
-Base.iterate(x::DenseUnionVector, st...) = iterate(x.itr, st...)
-Base.getindex(x::DenseUnionVector, i::Int) = getindex(x.itr, i)
-
-function todense(::Type{UnionT{T, typeIds, U}}, x) where {T, typeIds, U}
-    typeids = typeIds === nothing ? (0:(fieldcount(U) - 1)) : typeIds
-    len = length(x)
-    types = Vector{UInt8}(undef, len)
-    offsets = Vector{Int32}(undef, len)
-    data = Tuple(Vector{i == 1 ? Union{Missing, fieldtype(U, i)} : fieldtype(U, i)}(undef, 0) for i = 1:fieldcount(U))
-    for (i, y) in enumerate(x)
-        typeid = y === missing ? 0x00 : UInt8(typeids[isatypeid(y, U)])
-        @inbounds types[i] = typeid
-        @inbounds offsets[i] = length(data[typeid + 1])
-        push!(data[typeid + 1], y)
-    end
-    return types, offsets, data
-end
-
-struct SparseUnionVector{T, U} <: AbstractVector{UnionT{Meta.UnionMode.Sparse, nothing, U}}
-    itr::T
-end
-
-SparseUnionVector(x::T) where {T} = SparseUnionVector{T, Tuple{eachunion(eltype(x))...}}(x)
-Base.IndexStyle(::Type{<:SparseUnionVector}) = Base.IndexLinear()
-Base.size(x::SparseUnionVector) = (length(x.itr),)
-Base.iterate(x::SparseUnionVector, st...) = iterate(x.itr, st...)
-Base.getindex(x::SparseUnionVector, i::Int) = getindex(x.itr, i)
-
-# sparse union child array producer
-# for sparse unions, we split the parent array into
-# N child arrays, one per unioned type, each having the
-# same length as the parent; each child holds the parent's
-# elements of its own type, while the remaining slots are
-# filled with missing/default values
-function sparsetypeids(::Type{UnionT{T, typeIds, U}}, x) where {T, typeIds, U}
-    typeids = typeIds === nothing ? (0:(fieldcount(U) - 1)) : typeIds
-    len = length(x)
-    types = Vector{UInt8}(undef, len)
-    for (i, y) in enumerate(x)
-        typeid = y === missing ? 0x00 : UInt8(typeids[isatypeid(y, U)])
-        @inbounds types[i] = typeid
-    end
-    return types
-end
-
-struct ToSparseUnion{T, A} <: AbstractVector{T}
-    data::A
-end
-
-ToSparseUnion(::Type{T}, data::A) where {T, A} = ToSparseUnion{T, A}(data)
-
-Base.IndexStyle(::Type{<:ToSparseUnion}) = Base.IndexLinear()
-Base.size(x::ToSparseUnion) = (length(x.data),)
-
-Base.@propagate_inbounds function Base.getindex(A::ToSparseUnion{T}, i::Integer) where {T}
-    @boundscheck checkbounds(A, i)
-    @inbounds x = A.data[i]
-    return @inbounds x isa T ? x : ArrowTypes.default(T)
-end
-
-function compress(Z::Meta.CompressionType, comp, x::A) where {A <: DenseUnion}
-    len = length(x)
-    nc = nullcount(x)
-    typeIds = compress(Z, comp, x.typeIds)
-    offsets = compress(Z, comp, x.offsets)
-    buffers = [typeIds, offsets]
-    children = Compressed[]
-    for y in x.data
-        push!(children, compress(Z, comp, y))
-    end
-    return Compressed{Z, A}(x, buffers, len, nc, children)
-end
-
-"""
-    Arrow.SparseUnion
-
-An `ArrowVector` where the type of each element is one of a fixed set of types, meaning its eltype is like a julia `Union{type1, type2, ...}`.
-An `Arrow.SparseUnion`, in comparison to `Arrow.DenseUnion`, stores elements in a set of arrays, one array per possible type, and each typed
-array has the same length as the full array. This "wastes" space, since only one slot among the typed arrays is valid per full
-array element, but it enables certain optimizations precisely because every typed array has the same length.
-"""
-struct SparseUnion{T, S} <: ArrowVector{T}
-    arrow::Vector{UInt8} # need to hold a reference to arrow memory blob
-    typeIds::Vector{UInt8}
-    data::S # Tuple of ArrowVector
-    metadata::Union{Nothing, Dict{String, String}}
-end
-
-Base.size(s::SparseUnion) = size(s.typeIds)
-nullcount(x::SparseUnion) = 0
-
-@propagate_inbounds function Base.getindex(s::SparseUnion{T}, i::Integer) where {T}
-    @boundscheck checkbounds(s, i)
-    @inbounds typeId = s.typeIds[i]
-    @inbounds x = s.data[typeId + 1][i]
-    return x
-end
-
-@propagate_inbounds function Base.setindex!(s::SparseUnion{UnionT{T, typeIds, U}}, v, i::Integer) where {T, typeIds, U}
-    @boundscheck checkbounds(s, i)
-    typeids = typeIds === nothing ? (0:(fieldcount(U) - 1)) : typeIds
-    vtypeId = Int8(typeids[isatypeid(v, U)])
-    @inbounds s.typeIds[i] = vtypeId
-    @inbounds s.data[vtypeId + 1][i] = v
-    return v
-end
-
-arrowvector(U::Union, x, i, nl, fi, de, ded, meta; denseunions::Bool=true, kw...) =
-    arrowvector(denseunions ? DenseUnionVector(x) : SparseUnionVector(x), i, nl, fi, de, ded, meta; denseunions=denseunions, kw...)
-
-arrowvector(::UnionType, x::Union{DenseUnion, SparseUnion}, i, nl, fi, de, ded, meta; kw...) = x
-
-function arrowvector(::UnionType, x, i, nl, fi, de, ded, meta; kw...)
-    UT = eltype(x)
-    if unionmode(UT) == Meta.UnionMode.Dense
-        x = x isa DenseUnionVector ? x.itr : x
-        typeids, offsets, data = todense(UT, x)
-        data2 = map(y -> arrowvector(y[2], i, nl + 1, y[1], de, ded, nothing; kw...), enumerate(data))
-        return DenseUnion{UT, typeof(data2)}(UInt8[], UInt8[], typeids, offsets, data2, meta)
-    else
-        x = x isa SparseUnionVector ? x.itr : x
-        typeids = sparsetypeids(UT, x)
-        data3 = Tuple(arrowvector(ToSparseUnion(fieldtype(eltype(UT), j), x), i, nl + 1, j, de, ded, nothing; kw...) for j = 1:fieldcount(eltype(UT)))
-        return SparseUnion{UT, typeof(data3)}(UInt8[], typeids, data3, meta)
-    end
-end
-
-function compress(Z::Meta.CompressionType, comp, x::A) where {A <: SparseUnion}
-    len = length(x)
-    nc = nullcount(x)
-    typeIds = compress(Z, comp, x.typeIds)
-    buffers = [typeIds]
-    children = Compressed[]
-    for y in x.data
-        push!(children, compress(Z, comp, y))
-    end
-    return Compressed{Z, A}(x, buffers, len, nc, children)
-end
-
-function makenodesbuffers!(col::Union{DenseUnion, SparseUnion}, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    len = length(col)
-    nc = nullcount(col)
-    push!(fieldnodes, FieldNode(len, nc))
-    @debug 1 "made field node: nodeidx = $(length(fieldnodes)), col = $(typeof(col)), len = $(fieldnodes[end].length), nc = $(fieldnodes[end].null_count)"
-    # typeIds buffer
-    push!(fieldbuffers, Buffer(bufferoffset, len))
-    @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-    bufferoffset += padding(len, alignment)
-    if col isa DenseUnion
-        # offsets buffer
-        blen = sizeof(Int32) * len
-        push!(fieldbuffers, Buffer(bufferoffset, blen))
-        @debug 1 "made field buffer: bufferidx = $(length(fieldbuffers)), offset = $(fieldbuffers[end].offset), len = $(fieldbuffers[end].length), padded = $(padding(fieldbuffers[end].length, alignment))"
-        bufferoffset += padding(blen, alignment)
-    end
-    for child in col.data
-        bufferoffset = makenodesbuffers!(child, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    end
-    return bufferoffset
-end
-
-function writebuffer(io, col::Union{DenseUnion, SparseUnion}, alignment)
-    @debug 1 "writebuffer: col = $(typeof(col))"
-    @debug 2 col
-    # typeIds buffer
-    n = writearray(io, UInt8, col.typeIds)
-    @debug 1 "writing array: col = $(typeof(col.typeIds)), n = $n, padded = $(padding(n, alignment))"
-    writezeros(io, paddinglength(n, alignment))
-    if col isa DenseUnion
-        n = writearray(io, Int32, col.offsets)
-        @debug 1 "writing array: col = $(typeof(col.offsets)), n = $n, padded = $(padding(n, alignment))"
-        writezeros(io, paddinglength(n, alignment))
-    end
-    for child in col.data
-        writebuffer(io, child, alignment)
-    end
-    return
-end
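
The removed dense-union representation keeps one child array per member type,
plus typeIds (which child holds each element) and offsets (where within that
child). A compact sketch of the getindex lookup above (illustrative values):

    xs = Union{Int, String}[1, "a", 2]
    typeids = UInt8[0, 1, 0]      # 0 => Int child, 1 => String child
    offsets = Int32[0, 0, 1]      # index within the chosen child
    children = ([1, 2], ["a"])
    elem(i) = children[typeids[i] + 1][offsets[i] + 1]
    @assert elem(2) == "a" && elem(3) == 2
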
diff --git a/julia/Arrow/src/arrowtypes.jl b/julia/Arrow/src/arrowtypes.jl
deleted file mode 100644
index 55f586e..0000000
--- a/julia/Arrow/src/arrowtypes.jl
+++ /dev/null
@@ -1,166 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-The ArrowTypes module provides the [`ArrowTypes.ArrowType`](@ref) interface trait that objects can define
-in order to signal how they should be serialized in the arrow format.
-"""
-module ArrowTypes
-
-using UUIDs
-
-export ArrowType, NullType, PrimitiveType, BoolType, ListType, FixedSizeListType, MapType, StructType, UnionType, DictEncodedType
-
-abstract type ArrowType end
-
-ArrowType(x::T) where {T} = ArrowType(T)
-ArrowType(::Type{T}) where {T} = isprimitivetype(T) ? PrimitiveType() : StructType()
-
-function arrowconvert end
-
-arrowconvert(T, x) = convert(T, x)
-arrowconvert(::Type{Union{T, Missing}}, x) where {T} = arrowconvert(T, x)
-arrowconvert(::Type{Union{T, Missing}}, ::Missing) where {T} = missing
-
-struct NullType <: ArrowType end
-
-ArrowType(::Type{Missing}) = NullType()
-
-struct PrimitiveType <: ArrowType end
-
-ArrowType(::Type{<:Integer}) = PrimitiveType()
-ArrowType(::Type{<:AbstractFloat}) = PrimitiveType()
-
-arrowconvert(::Type{UInt128}, u::UUID) = UInt128(u)
-arrowconvert(::Type{UUID}, u::UInt128) = UUID(u)
-
-# This method is included as a deprecation path to allow reading Arrow files that may have
-# been written before Arrow.jl defined its own UUID <-> UInt128 mapping (in which case
-# a struct-based fallback `JuliaLang.UUID` extension type may have been utilized)
-arrowconvert(::Type{UUID}, u::NamedTuple{(:value,),Tuple{UInt128}}) = UUID(u.value)
-
-struct BoolType <: ArrowType end
-ArrowType(::Type{Bool}) = BoolType()
-
-struct ListType <: ArrowType end
-
-# types for which isstringtype is true MUST be UTF8 (other codeunit sizes are not supported; the arrow encoding for strings is specifically UTF8)
-isstringtype(T) = false
-isstringtype(::Type{Union{T, Missing}}) where {T} = isstringtype(T)
-
-ArrowType(::Type{<:AbstractString}) = ListType()
-isstringtype(::Type{<:AbstractString}) = true
-
-ArrowType(::Type{Symbol}) = ListType()
-isstringtype(::Type{Symbol}) = true
-arrowconvert(::Type{Symbol}, x::String) = Symbol(x)
-arrowconvert(::Type{String}, x::Symbol) = String(x)
-
-ArrowType(::Type{<:AbstractArray}) = ListType()
-
-struct FixedSizeListType <: ArrowType end
-
-ArrowType(::Type{NTuple{N, T}}) where {N, T} = FixedSizeListType()
-gettype(::Type{NTuple{N, T}}) where {N, T} = T
-getsize(::Type{NTuple{N, T}}) where {N, T} = N
-
-struct StructType <: ArrowType end
-
-ArrowType(::Type{<:NamedTuple}) = StructType()
-
-@enum STRUCT_TYPES NAMEDTUPLE STRUCT # KEYWORDARGS
-
-structtype(::Type{NamedTuple{N, T}}) where {N, T} = NAMEDTUPLE
-structtype(::Type{T}) where {T} = STRUCT
-
-# must implement keytype, valtype
-struct MapType <: ArrowType end
-
-ArrowType(::Type{<:AbstractDict}) = MapType()
-
-struct UnionType <: ArrowType end
-
-ArrowType(::Union) = UnionType()
-
-struct DictEncodedType <: ArrowType end
-
-"""
-There are a couple places when writing arrow buffers where
-we need to write a "dummy" value; it doesn't really matter
-what we write, but we need to write something of a specific
-type. So each supported writing type needs to define `default`.
-"""
-function default end
-
-default(T) = zero(T)
-default(::Type{Symbol}) = Symbol()
-default(::Type{Char}) = '\0'
-default(::Type{<:AbstractString}) = ""
-default(::Type{Union{T, Missing}}) where {T} = default(T)
-
-function default(::Type{A}) where {A <: AbstractVector{T}} where {T}
-    a = similar(A, 1)
-    a[1] = default(T)
-    return a
-end
-
-default(::Type{NTuple{N, T}}) where {N, T} = ntuple(i -> default(T), N)
-default(::Type{T}) where {T <: Tuple} = Tuple(default(fieldtype(T, i)) for i = 1:fieldcount(T))
-default(::Type{Dict{K, V}}) where {K, V} = Dict{K, V}()
-default(::Type{NamedTuple{names, types}}) where {names, types} = NamedTuple{names}(Tuple(default(fieldtype(types, i)) for i = 1:length(names)))
-
-const JULIA_TO_ARROW_TYPE_MAPPING = Dict{Type, Tuple{String, Type}}(
-    Char => ("JuliaLang.Char", UInt32),
-    Symbol => ("JuliaLang.Symbol", String),
-    UUID => ("JuliaLang.UUID", UInt128),
-)
-
-istyperegistered(::Type{T}) where {T} = haskey(JULIA_TO_ARROW_TYPE_MAPPING, T)
-
-function getarrowtype!(meta, ::Type{T}) where {T}
-    arrowname, arrowtype = JULIA_TO_ARROW_TYPE_MAPPING[T]
-    meta["ARROW:extension:name"] = arrowname
-    meta["ARROW:extension:metadata"] = ""
-    return arrowtype
-end
-
-const ARROW_TO_JULIA_TYPE_MAPPING = Dict{String, Tuple{Type, Type}}(
-    "JuliaLang.Char" => (Char, UInt32),
-    "JuliaLang.Symbol" => (Symbol, String),
-    "JuliaLang.UUID" => (UUID, UInt128),
-)
-
-function extensiontype(f, meta)
-    if haskey(meta, "ARROW:extension:name")
-        typename = meta["ARROW:extension:name"]
-        if haskey(ARROW_TO_JULIA_TYPE_MAPPING, typename)
-            T = ARROW_TO_JULIA_TYPE_MAPPING[typename][1]
-            return f.nullable ? Union{T, Missing} : T
-        else
-            @warn "unsupported ARROW:extension:name type: \"$typename\""
-        end
-    end
-    return nothing
-end
-
-function registertype!(juliatype::Type, arrowtype::Type, arrowname::String=string("JuliaLang.", string(juliatype)))
-    # TODO: validate that juliatype isn't already default arrow type
-    JULIA_TO_ARROW_TYPE_MAPPING[juliatype] = (arrowname, arrowtype)
-    ARROW_TO_JULIA_TYPE_MAPPING[arrowname] = (juliatype, arrowtype)
-    return
-end
-
-end # module ArrowTypes
\ No newline at end of file
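
registertype! above maintains the two dictionaries as inverse mappings so that
extension types round-trip by name. A self-contained sketch of that registry
shape (hypothetical names, not the removed API):

    JULIA_TO_ARROW = Dict{Type, Tuple{String, Type}}()
    ARROW_TO_JULIA = Dict{String, Tuple{Type, Type}}()
    function register!(jt::Type, at::Type, name::String = "JuliaLang." * string(jt))
        JULIA_TO_ARROW[jt] = (name, at)    # julia type => (extension name, storage type)
        ARROW_TO_JULIA[name] = (jt, at)    # extension name => (julia type, storage type)
        return
    end
    register!(Char, UInt32)
    @assert ARROW_TO_JULIA["JuliaLang.Char"] == (Char, UInt32)
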
diff --git a/julia/Arrow/src/eltypes.jl b/julia/Arrow/src/eltypes.jl
deleted file mode 100644
index 0a6a74d..0000000
--- a/julia/Arrow/src/eltypes.jl
+++ /dev/null
@@ -1,415 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Given a flatbuffers metadata type definition (a Field instance from Schema.fbs),
-translate to the appropriate Julia storage eltype
-"""
-function juliaeltype end
-
-finaljuliatype(T) = T
-finaljuliatype(::Type{Missing}) = Missing
-finaljuliatype(::Type{Union{T, Missing}}) where {T} = Union{Missing, finaljuliatype(T)}
-
-"""
-Given a FlatBuffers.Builder and a Julia column or column eltype,
-write the field.type flatbuffer definition of the eltype
-"""
-function arrowtype end
-
-arrowtype(b, col::AbstractVector{T}) where {T} = arrowtype(b, maybemissing(T))
-arrowtype(b, col::DictEncoded) = arrowtype(b, col.encoding.data)
-arrowtype(b, col::Compressed) = arrowtype(b, col.data)
-
-function juliaeltype(f::Meta.Field, ::Nothing, convert::Bool)
-    T = juliaeltype(f, convert)
-    return convert ? finaljuliatype(T) : T
-end
-
-function juliaeltype(f::Meta.Field, meta::Dict{String, String}, convert::Bool)
-    TT = juliaeltype(f, convert)
-    !convert && return TT
-    T = finaljuliatype(TT)
-    TTT = ArrowTypes.extensiontype(f, meta)
-    return something(TTT, T)
-end
-
-function juliaeltype(f::Meta.Field, convert::Bool)
-    T = juliaeltype(f, f.type, convert)
-    return (f.nullable ? Union{T, Missing} : T)
-end
-
-juliaeltype(f::Meta.Field, ::Meta.Null, convert) = Missing
-
-function arrowtype(b, ::Type{Missing})
-    Meta.nullStart(b)
-    return Meta.Null, Meta.nullEnd(b), nothing
-end
-
-function juliaeltype(f::Meta.Field, int::Meta.Int, convert)
-    if int.is_signed
-        if int.bitWidth == 8
-            Int8
-        elseif int.bitWidth == 16
-            Int16
-        elseif int.bitWidth == 32
-            Int32
-        elseif int.bitWidth == 64
-            Int64
-        elseif int.bitWidth == 128
-            Int128
-        else
-            error("$int is not valid arrow type metadata")
-        end
-    else
-        if int.bitWidth == 8
-            UInt8
-        elseif int.bitWidth == 16
-            UInt16
-        elseif int.bitWidth == 32
-            UInt32
-        elseif int.bitWidth == 64
-            UInt64
-        elseif int.bitWidth == 128
-            UInt128
-        else
-            error("$int is not valid arrow type metadata")
-        end
-    end
-end
-
-function arrowtype(b, ::Type{T}) where {T <: Integer}
-    Meta.intStart(b)
-    Meta.intAddBitWidth(b, Int32(8 * sizeof(T)))
-    Meta.intAddIsSigned(b, T <: Signed)
-    return Meta.Int, Meta.intEnd(b), nothing
-end
-
-# primitive types
-function juliaeltype(f::Meta.Field, fp::Meta.FloatingPoint, convert)
-    if fp.precision == Meta.Precision.HALF
-        Float16
-    elseif fp.precision == Meta.Precision.SINGLE
-        Float32
-    elseif fp.precision == Meta.Precision.DOUBLE
-        Float64
-    end
-end
-
-function arrowtype(b, ::Type{T}) where {T <: AbstractFloat}
-    Meta.floatingPointStart(b)
-    Meta.floatingPointAddPrecision(b, T === Float16 ? Meta.Precision.HALF : T === Float32 ? Meta.Precision.SINGLE : Meta.Precision.DOUBLE)
-    return Meta.FloatingPoint, Meta.floatingPointEnd(b), nothing
-end
-
-juliaeltype(f::Meta.Field, b::Union{Meta.Utf8, Meta.LargeUtf8}, convert) = String
-
-datasizeof(x) = sizeof(x)
-datasizeof(x::AbstractVector) = sum(datasizeof, x)
-
-juliaeltype(f::Meta.Field, b::Union{Meta.Binary, Meta.LargeBinary}, convert) = Vector{UInt8}
-
-juliaeltype(f::Meta.Field, x::Meta.FixedSizeBinary, convert) = NTuple{Int(x.byteWidth), UInt8}
-
-# arggh!
-Base.write(io::IO, x::NTuple{N, T}) where {N, T} = sum(y -> Base.write(io, y), x)
-
-juliaeltype(f::Meta.Field, x::Meta.Bool, convert) = Bool
-
-function arrowtype(b, ::Type{Bool})
-    Meta.boolStart(b)
-    return Meta.Bool, Meta.boolEnd(b), nothing
-end
-
-struct Decimal{P, S, T}
-    value::T # only Int128 or Int256
-end
-
-Base.zero(::Type{Decimal{P, S, T}}) where {P, S, T} = Decimal{P, S, T}(T(0))
-==(a::Decimal{P, S, T}, b::Decimal{P, S, T}) where {P, S, T} = ==(a.value, b.value)
-Base.isequal(a::Decimal{P, S, T}, b::Decimal{P, S, T}) where {P, S, T} = isequal(a.value, b.value)
-
-function juliaeltype(f::Meta.Field, x::Meta.Decimal, convert)
-    return Decimal{x.precision, x.scale, x.bitWidth == 256 ? Int256 : Int128}
-end
-
-ArrowTypes.ArrowType(::Type{<:Decimal}) = PrimitiveType()
-
-function arrowtype(b, ::Type{Decimal{P, S, T}}) where {P, S, T}
-    Meta.decimalStart(b)
-    Meta.decimalAddPrecision(b, Int32(P))
-    Meta.decimalAddScale(b, Int32(S))
-    Meta.decimalAddBitWidth(b, Int32(T == Int256 ? 256 : 128))
-    return Meta.Decimal, Meta.decimalEnd(b), nothing
-end
-
-Base.write(io::IO, x::Decimal) = Base.write(io, x.value)
-
-abstract type ArrowTimeType end
-Base.write(io::IO, x::ArrowTimeType) = Base.write(io, x.x)
-ArrowTypes.ArrowType(::Type{<:ArrowTimeType}) = PrimitiveType()
-
-struct Date{U, T} <: ArrowTimeType
-    x::T
-end
-
-Base.zero(::Type{Date{U, T}}) where {U, T} = Date{U, T}(T(0))
-storagetype(::Type{Date{U, T}}) where {U, T} = T
-bitwidth(x::Meta.DateUnit) = x == Meta.DateUnit.DAY ? Int32 : Int64
-Date{Meta.DateUnit.DAY}(days) = Date{Meta.DateUnit.DAY, Int32}(Int32(days))
-Date{Meta.DateUnit.MILLISECOND}(ms) = Date{Meta.DateUnit.MILLISECOND, Int64}(Int64(ms))
-const DATE = Date{Meta.DateUnit.DAY, Int32}
-
-juliaeltype(f::Meta.Field, x::Meta.Date, convert) = Date{x.unit, bitwidth(x.unit)}
-finaljuliatype(::Type{Date{Meta.DateUnit.DAY, Int32}}) = Dates.Date
-Base.convert(::Type{Dates.Date}, x::Date{Meta.DateUnit.DAY, Int32}) = Dates.Date(Dates.UTD(Int64(x.x + UNIX_EPOCH_DATE)))
-finaljuliatype(::Type{Date{Meta.DateUnit.MILLISECOND, Int64}}) = Dates.DateTime
-Base.convert(::Type{Dates.DateTime}, x::Date{Meta.DateUnit.MILLISECOND, Int64}) = Dates.DateTime(Dates.UTM(Int64(x.x + UNIX_EPOCH_DATETIME)))
-
-function arrowtype(b, ::Type{Date{U, T}}) where {U, T}
-    Meta.dateStart(b)
-    Meta.dateAddUnit(b, U)
-    return Meta.Date, Meta.dateEnd(b), nothing
-end
-
-const UNIX_EPOCH_DATE = Dates.value(Dates.Date(1970))
-Base.convert(::Type{Date{Meta.DateUnit.DAY, Int32}}, x::Dates.Date) = Date{Meta.DateUnit.DAY, Int32}(Int32(Dates.value(x) - UNIX_EPOCH_DATE))
-
-const UNIX_EPOCH_DATETIME = Dates.value(Dates.DateTime(1970))
-Base.convert(::Type{Date{Meta.DateUnit.MILLISECOND, Int64}}, x::Dates.DateTime) = Date{Meta.DateUnit.MILLISECOND, Int64}(Int64(Dates.value(x) - UNIX_EPOCH_DATETIME))
-
-struct Time{U, T} <: ArrowTimeType
-    x::T
-end
-
-Base.zero(::Type{Time{U, T}}) where {U, T} = Time{U, T}(T(0))
-const TIME = Time{Meta.TimeUnit.NANOSECOND, Int64}
-
-bitwidth(x::Meta.TimeUnit) = x == Meta.TimeUnit.SECOND || x == Meta.TimeUnit.MILLISECOND ? Int32 : Int64
-Time{U}(x) where {U <: Meta.TimeUnit} = Time{U, bitwidth(U)}(bitwidth(U)(x))
-storagetype(::Type{Time{U, T}}) where {U, T} = T
-juliaeltype(f::Meta.Field, x::Meta.Time, convert) = Time{x.unit, bitwidth(x.unit)}
-finaljuliatype(::Type{<:Time}) = Dates.Time
-periodtype(U::Meta.TimeUnit) = U === Meta.TimeUnit.SECOND ? Dates.Second :
-                               U === Meta.TimeUnit.MILLISECOND ? Dates.Millisecond :
-                               U === Meta.TimeUnit.MICROSECOND ? Dates.Microsecond : Dates.Nanosecond
-Base.convert(::Type{Dates.Time}, x::Time{U, T}) where {U, T} = Dates.Time(Dates.Nanosecond(Dates.tons(periodtype(U)(x.x))))
-
-function arrowtype(b, ::Type{Time{U, T}}) where {U, T}
-    Meta.timeStart(b)
-    Meta.timeAddUnit(b, U)
-    Meta.timeAddBitWidth(b, Int32(8 * sizeof(T)))
-    return Meta.Time, Meta.timeEnd(b), nothing
-end
-
-Base.convert(::Type{Time{Meta.TimeUnit.NANOSECOND, Int64}}, x::Dates.Time) = Time{Meta.TimeUnit.NANOSECOND, Int64}(Dates.value(x))
-
-struct Timestamp{U, TZ} <: ArrowTimeType
-    x::Int64
-end
-
-Base.zero(::Type{Timestamp{U, T}}) where {U, T} = Timestamp{U, T}(Int64(0))
-
-function juliaeltype(f::Meta.Field, x::Meta.Timestamp, convert)
-    return Timestamp{x.unit, x.timezone === nothing ? nothing : Symbol(x.timezone)}
-end
-
-const DATETIME = Timestamp{Meta.TimeUnit.MILLISECOND, nothing}
-
-finaljuliatype(::Type{Timestamp{U, TZ}}) where {U, TZ} = ZonedDateTime
-finaljuliatype(::Type{Timestamp{U, nothing}}) where {U} = DateTime
-Base.convert(::Type{ZonedDateTime}, x::Timestamp{U, TZ}) where {U, TZ} =
-    ZonedDateTime(Dates.DateTime(Dates.UTM(Int64(Dates.toms(periodtype(U)(x.x)) + UNIX_EPOCH_DATETIME))), TimeZone(String(TZ)))
-Base.convert(::Type{DateTime}, x::Timestamp{U, nothing}) where {U} =
-    Dates.DateTime(Dates.UTM(Int64(Dates.toms(periodtype(U)(x.x)) + UNIX_EPOCH_DATETIME)))
-Base.convert(::Type{Timestamp{Meta.TimeUnit.MILLISECOND, TZ}}, x::ZonedDateTime) where {TZ} =
-    Timestamp{Meta.TimeUnit.MILLISECOND, TZ}(Int64(Dates.value(DateTime(x)) - UNIX_EPOCH_DATETIME))
-Base.convert(::Type{Timestamp{Meta.TimeUnit.MILLISECOND, nothing}}, x::DateTime) =
-    Timestamp{Meta.TimeUnit.MILLISECOND, nothing}(Int64(Dates.value(x) - UNIX_EPOCH_DATETIME))
-
-function arrowtype(b, ::Type{Timestamp{U, TZ}}) where {U, TZ}
-    tz = TZ !== nothing ? FlatBuffers.createstring!(b, String(TZ)) : FlatBuffers.UOffsetT(0)
-    Meta.timestampStart(b)
-    Meta.timestampAddUnit(b, U)
-    Meta.timestampAddTimezone(b, tz)
-    return Meta.Timestamp, Meta.timestampEnd(b), nothing
-end
-
-struct Interval{U, T} <: ArrowTimeType
-    x::T
-end
-
-Base.zero(::Type{Interval{U, T}}) where {U, T} = Interval{U, T}(T(0))
-
-bitwidth(x::Meta.IntervalUnit) = x == Meta.IntervalUnit.YEAR_MONTH ? Int32 : Int64
-Interval{Meta.IntervalUnit.YEAR_MONTH}(x) = Interval{Meta.IntervalUnit.YEAR_MONTH, Int32}(Int32(x))
-Interval{Meta.IntervalUnit.DAY_TIME}(x) = Interval{Meta.IntervalUnit.DAY_TIME, Int64}(Int64(x))
-
-function juliaeltype(f::Meta.Field, x::Meta.Interval, convert)
-    return Interval{x.unit, bitwidth(x.unit)}
-end
-
-function arrowtype(b, ::Type{Interval{U, T}}) where {U, T}
-    Meta.intervalStart(b)
-    Meta.intervalAddUnit(b, U)
-    return Meta.Interval, Meta.intervalEnd(b), nothing
-end
-
-struct Duration{U} <: ArrowTimeType
-    x::Int64
-end
-
-Base.zero(::Type{Duration{U}}) where {U} = Duration{U}(Int64(0))
-
-function juliaeltype(f::Meta.Field, x::Meta.Duration, convert)
-    return Duration{x.unit}
-end
-
-finaljuliatype(::Type{Duration{U}}) where {U} = periodtype(U)
-Base.convert(::Type{P}, x::Duration{U}) where {P <: Dates.Period, U} = P(periodtype(U)(x.x))
-
-function arrowtype(b, ::Type{Duration{U}}) where {U}
-    Meta.durationStart(b)
-    Meta.durationAddUnit(b, U)
-    return Meta.Duration, Meta.durationEnd(b), nothing
-end
-
-arrowperiodtype(P) = Meta.TimeUnit.SECOND
-arrowperiodtype(::Type{Dates.Millisecond}) = Meta.TimeUnit.MILLISECOND
-arrowperiodtype(::Type{Dates.Microsecond}) = Meta.TimeUnit.MICROSECOND
-arrowperiodtype(::Type{Dates.Nanosecond}) = Meta.TimeUnit.NANOSECOND
-
-Base.convert(::Type{Duration{U}}, x::Dates.Period) where {U} = Duration{U}(Dates.value(periodtype(U)(x)))
-
-# nested types; call juliaeltype recursively on nested children
-function juliaeltype(f::Meta.Field, list::Union{Meta.List, Meta.LargeList}, convert)
-    return Vector{juliaeltype(f.children[1], buildmetadata(f.children[1]), convert)}
-end
-
-# arrowtype will call fieldoffset recursively for children
-function arrowtype(b, x::List{T, O, A}) where {T, O, A}
-    if eltype(A) == UInt8
-        if T <: AbstractString || T <: Union{AbstractString, Missing}
-            if O == Int32
-                Meta.utf8Start(b)
-                return Meta.Utf8, Meta.utf8End(b), nothing
-            else # if O == Int64
-                Meta.largUtf8Start(b)
-                return Meta.LargeUtf8, Meta.largUtf8End(b), nothing
-            end
-        else # if Vector{UInt8}
-            if O == Int32
-                Meta.binaryStart(b)
-                return Meta.Binary, Meta.binaryEnd(b), nothing
-            else # if O == Int64
-                Meta.largeBinaryStart(b)
-                return Meta.LargeBinary, Meta.largeBinaryEnd(b), nothing
-            end
-        end
-    else
-        children = [fieldoffset(b, "", x.data)]
-        if O == Int32
-            Meta.listStart(b)
-            return Meta.List, Meta.listEnd(b), children
-        else
-            Meta.largeListStart(b)
-            return Meta.LargeList, Meta.largeListEnd(b), children
-        end
-    end
-end
-
-function juliaeltype(f::Meta.Field, list::Meta.FixedSizeList, convert)
-    type = juliaeltype(f.children[1], buildmetadata(f.children[1]), convert)
-    return NTuple{Int(list.listSize), type}
-end
-
-function arrowtype(b, x::FixedSizeList{T, A}) where {T, A}
-    N = ArrowTypes.getsize(Base.nonmissingtype(T))
-    if eltype(A) == UInt8
-        Meta.fixedSizeBinaryStart(b)
-        Meta.fixedSizeBinaryAddByteWidth(b, Int32(N))
-        return Meta.FixedSizeBinary, Meta.fixedSizeBinaryEnd(b), nothing
-    else
-        children = [fieldoffset(b, "", x.data)]
-        Meta.fixedSizeListStart(b)
-        Meta.fixedSizeListAddListSize(b, Int32(N))
-        return Meta.FixedSizeList, Meta.fixedSizeListEnd(b), children
-    end
-end
-
-function juliaeltype(f::Meta.Field, map::Meta.Map, convert)
-    K = juliaeltype(f.children[1].children[1], buildmetadata(f.children[1].children[1]), convert)
-    V = juliaeltype(f.children[1].children[2], buildmetadata(f.children[1].children[2]), convert)
-    return Dict{K, V}
-end
-
-function arrowtype(b, x::Map)
-    children = [fieldoffset(b, "entries", x.data)]
-    Meta.mapStart(b)
-    return Meta.Map, Meta.mapEnd(b), children
-end
-
-struct KeyValue{K, V}
-    key::K
-    value::V
-end
-keyvalueK(::Type{KeyValue{K, V}}) where {K, V} = K
-keyvalueV(::Type{KeyValue{K, V}}) where {K, V} = V
-Base.length(kv::KeyValue) = 1
-Base.iterate(kv::KeyValue, st=1) = st === nothing ? nothing : (kv, nothing)
-ArrowTypes.default(::Type{KeyValue{K, V}}) where {K, V} = KeyValue(default(K), default(V))
-
-function arrowtype(b, ::Type{KeyValue{K, V}}) where {K, V}
-    children = [fieldoffset(b, "key", K), fieldoffset(b, "value", V)]
-    Meta.structStart(b)
-    return Meta.Struct, Meta.structEnd(b), children
-end
-
-function juliaeltype(f::Meta.Field, list::Meta.Struct, convert)
-    names = Tuple(Symbol(x.name) for x in f.children)
-    types = Tuple(juliaeltype(x, buildmetadata(x), convert) for x in f.children)
-    return NamedTuple{names, Tuple{types...}}
-end
-
-function arrowtype(b, x::Struct{T, S}) where {T, S}
-    names = fieldnames(Base.nonmissingtype(T))
-    children = [fieldoffset(b, names[i], x.data[i]) for i = 1:length(names)]
-    Meta.structStart(b)
-    return Meta.Struct, Meta.structEnd(b), children
-end
-
-# Unions
-function juliaeltype(f::Meta.Field, u::Meta.Union, convert)
-    return Union{(juliaeltype(x, buildmetadata(x), convert) for x in f.children)...}
-end
-
-arrowtype(b, x::Union{DenseUnion{TT, S}, SparseUnion{TT, S}}) where {TT, S} = arrowtype(b, TT, x)
-function arrowtype(b, ::Type{UnionT{T, typeIds, U}}, x::Union{DenseUnion{TT, S}, SparseUnion{TT, S}}) where {T, typeIds, U, TT, S}
-    if typeIds !== nothing
-        Meta.unionStartTypeIdsVector(b, length(typeIds))
-        for id in Iterators.reverse(typeIds)
-            FlatBuffers.prepend!(b, id)
-        end
-        TI = FlatBuffers.endvector!(b, length(typeIds))
-    end
-    children = [fieldoffset(b, "", x.data[i]) for i = 1:fieldcount(U)]
-    Meta.unionStart(b)
-    Meta.unionAddMode(b, T)
-    if typeIds !== nothing
-        Meta.unionAddTypeIds(b, TI)
-    end
-    return Meta.Union, Meta.unionEnd(b), children
-end
diff --git a/julia/Arrow/src/metadata/File.jl b/julia/Arrow/src/metadata/File.jl
deleted file mode 100644
index 8c1622c..0000000
--- a/julia/Arrow/src/metadata/File.jl
+++ /dev/null
@@ -1,90 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-struct Footer <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Footer) = (:version, :schema, :dictionaries, :recordBatches, :custom_metadata)
-
-function Base.getproperty(x::Footer, field::Symbol)
-    if field === :version
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), MetadataVersion)
-        return MetadataVersion.V1
-    elseif field === :schema
-        o = FlatBuffers.offset(x, 6)
-        if o != 0
-            y = FlatBuffers.indirect(x, o + FlatBuffers.pos(x))
-            return FlatBuffers.init(Schema, FlatBuffers.bytes(x), y)
-        end
-    elseif field === :dictionaries
-        o = FlatBuffers.offset(x, 8)
-        if o != 0
-            return FlatBuffers.Array{Block}(x, o)
-        end
-    elseif field === :recordBatches
-        o = FlatBuffers.offset(x, 10)
-        if o != 0
-            return FlatBuffers.Array{Block}(x, o)
-        end
-    elseif field === :custom_metadata
-        o = FlatBuffers.offset(x, 12)
-        if o != 0
-            return FlatBuffers.Array{KeyValue}(x, o)
-        end
-    end
-    return nothing
-end
-
-footerStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 4)
-footerAddVersion(b::FlatBuffers.Builder, version::MetadataVersion) = FlatBuffers.prependslot!(b, 0, version, 0)
-footerAddSchema(b::FlatBuffers.Builder, schema::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, schema, 0)
-footerAddDictionaries(b::FlatBuffers.Builder, dictionaries::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 2, dictionaries, 0)
-footerStartDictionariesVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 24, numelems, 8)
-footerAddRecordBatches(b::FlatBuffers.Builder, recordbatches::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 3, recordbatches, 0)
-footerStartRecordBatchesVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 24, numelems, 8)
-footerEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Block <: FlatBuffers.Struct
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-FlatBuffers.structsizeof(::Base.Type{Block}) = 24
-
-Base.propertynames(x::Block) = (:offset, :metaDataLength, :bodyLength)
-
-function Base.getproperty(x::Block, field::Symbol)
-    if field === :offset
-        return FlatBuffers.get(x, FlatBuffers.pos(x), Int64)
-    elseif field === :metaDataLength
-        return FlatBuffers.get(x, FlatBuffers.pos(x) + 8, Int32)
-    elseif field === :bodyLength
-        return FlatBuffers.get(x, FlatBuffers.pos(x) + 16, Int64)
-    end
-    return nothing
-end
-
-function createBlock(b::FlatBuffers.Builder, offset::Int64, metadatalength::Int32, bodylength::Int64)
-    FlatBuffers.prep!(b, 8, 24)
-    prepend!(b, bodylength)
-    FlatBuffers.pad!(b, 4)
-    prepend!(b, metadatalength)
-    prepend!(b, offset)
-    return FlatBuffers.offset(b)
-end
\ No newline at end of file
diff --git a/julia/Arrow/src/metadata/Flatbuf.jl b/julia/Arrow/src/metadata/Flatbuf.jl
deleted file mode 100644
index a69230d..0000000
--- a/julia/Arrow/src/metadata/Flatbuf.jl
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-module Flatbuf
-
-using ..FlatBuffers
-
-include("Schema.jl")
-include("File.jl")
-include("Message.jl")
-
-end # module
\ No newline at end of file
diff --git a/julia/Arrow/src/metadata/Message.jl b/julia/Arrow/src/metadata/Message.jl
deleted file mode 100644
index 4fe6253..0000000
--- a/julia/Arrow/src/metadata/Message.jl
+++ /dev/null
@@ -1,202 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-struct FieldNode <: FlatBuffers.Struct
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-FlatBuffers.structsizeof(::Base.Type{FieldNode}) = 16
-
-Base.propertynames(x::FieldNode) = (:length, :null_count)
-
-function Base.getproperty(x::FieldNode, field::Symbol)
-    if field === :length
-        return FlatBuffers.get(x, FlatBuffers.pos(x), Int64)
-    elseif field === :null_count
-        return FlatBuffers.get(x, FlatBuffers.pos(x) + 8, Int64)
-    end
-    return nothing
-end
-
-function createFieldNode(b::FlatBuffers.Builder, length::Int64, nullCount::Int64)
-    FlatBuffers.prep!(b, 8, 16)
-    prepend!(b, nullCount)
-    prepend!(b, length)
-    return FlatBuffers.offset(b)
-end
-
-FlatBuffers.@scopedenum CompressionType::Int8 LZ4_FRAME ZSTD
-
-FlatBuffers.@scopedenum BodyCompressionMethod::Int8 BUFFER
-
-struct BodyCompression <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::BodyCompression) = (:codec, :method)
-
-function Base.getproperty(x::BodyCompression, field::Symbol)
-    if field === :codec
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), CompressionType)
-        return CompressionType.LZ4_FRAME
-    elseif field === :method
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), BodyCompressionMethod)
-        return BodyCompressionMethod.BUFFER
-    end
-    return nothing
-end
-
-bodyCompressionStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 2)
-bodyCompressionAddCodec(b::FlatBuffers.Builder, codec::CompressionType) = FlatBuffers.prependslot!(b, 0, codec, 0)
-bodyCompressionAddMethod(b::FlatBuffers.Builder, method::BodyCompressionMethod) = FlatBuffers.prependslot!(b, 1, method, 0)
-bodyCompressionEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct RecordBatch <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::RecordBatch) = (:length, :nodes, :buffers, :compression)
-
-function Base.getproperty(x::RecordBatch, field::Symbol)
-    if field === :length
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int64)
-    elseif field === :nodes
-        o = FlatBuffers.offset(x, 6)
-        if o != 0
-            return FlatBuffers.Array{FieldNode}(x, o)
-        end
-    elseif field === :buffers
-        o = FlatBuffers.offset(x, 8)
-        if o != 0
-            return FlatBuffers.Array{Buffer}(x, o)
-        end
-    elseif field === :compression
-        o = FlatBuffers.offset(x, 10)
-        if o != 0
-            y = FlatBuffers.indirect(x, o + FlatBuffers.pos(x))
-            return FlatBuffers.init(BodyCompression, FlatBuffers.bytes(x), y)
-        end
-    end
-    return nothing
-end
-
-recordBatchStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 4)
-recordBatchAddLength(b::FlatBuffers.Builder, length::Int64) = FlatBuffers.prependslot!(b, 0, length, 0)
-recordBatchAddNodes(b::FlatBuffers.Builder, nodes::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, nodes, 0)
-recordBatchStartNodesVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 16, numelems, 8)
-recordBatchAddBuffers(b::FlatBuffers.Builder, buffers::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 2, buffers, 0)
-recordBatchStartBuffersVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 16, numelems, 8)
-recordBatchAddCompression(b::FlatBuffers.Builder, c::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 3, c, 0)
-recordBatchEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct DictionaryBatch <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::DictionaryBatch) = (:id, :data, :isDelta)
-
-function Base.getproperty(x::DictionaryBatch, field::Symbol)
-    if field === :id
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int64)
-        return Int64(0)
-    elseif field === :data
-        o = FlatBuffers.offset(x, 6)
-        if o != 0
-            y = FlatBuffers.indirect(x, o + FlatBuffers.pos(x))
-            return FlatBuffers.init(RecordBatch, FlatBuffers.bytes(x), y)
-        end
-    elseif field === :isDelta
-        o = FlatBuffers.offset(x, 8)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Base.Bool)
-        return false
-    end
-    return nothing
-end
-
-dictionaryBatchStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 3)
-dictionaryBatchAddId(b::FlatBuffers.Builder, id::Int64) = FlatBuffers.prependslot!(b, 0, id, 0)
-dictionaryBatchAddData(b::FlatBuffers.Builder, data::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, data, 0)
-dictionaryBatchAddIsDelta(b::FlatBuffers.Builder, isdelta::Base.Bool) = FlatBuffers.prependslot!(b, 2, isdelta, false)
-dictionaryBatchEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-function MessageHeader(b::UInt8)
-    b == 1 && return Schema
-    b == 2 && return DictionaryBatch
-    b == 3 && return RecordBatch
-    # b == 4 && return Tensor
-    # b == 5 && return SparseTensor
-    return nothing
-end
-
-function MessageHeader(::Base.Type{T})::Int16 where {T}
-    T == Schema && return 1
-    T == DictionaryBatch && return 2
-    T == RecordBatch && return 3
-    # T == Tensor && return 4
-    # T == SparseTensor && return 5
-    return 0
-end
-
-struct Message <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Message) = (:version, :header, :bodyLength, :custom_metadata)
-
-function Base.getproperty(x::Message, field::Symbol)
-    if field === :version
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), MetadataVersion)
-    elseif field === :header
-        o = FlatBuffers.offset(x, 6)
-        if o != 0
-            T = MessageHeader(FlatBuffers.get(x, o + FlatBuffers.pos(x), UInt8))
-            o = FlatBuffers.offset(x, 8)
-            pos = FlatBuffers.union(x, o)
-            if o != 0
-                return FlatBuffers.init(T, FlatBuffers.bytes(x), pos)
-            end
-        end
-    elseif field === :bodyLength
-        o = FlatBuffers.offset(x, 10)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int64)
-        return Int64(0)
-    elseif field === :custom_metadata
-        o = FlatBuffers.offset(x, 12)
-        if o != 0
-            return FlatBuffers.Array{KeyValue}(x, o)
-        end
-    end
-    return nothing
-end
-
-messageStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 5)
-messageAddVersion(b::FlatBuffers.Builder, version::MetadataVersion) = FlatBuffers.prependslot!(b, 0, version, 0)
-messageAddHeaderType(b::FlatBuffers.Builder, ::Core.Type{T}) where {T} = FlatBuffers.prependslot!(b, 1, MessageHeader(T), 0)
-messageAddHeader(b::FlatBuffers.Builder, header::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 2, header, 0)
-messageAddBodyLength(b::FlatBuffers.Builder, bodyLength::Int64) = FlatBuffers.prependslot!(b, 3, bodyLength, 0)
-messageAddCustomMetadata(b::FlatBuffers.Builder, meta::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 4, meta, 0)
-messageStartCustomMetadataVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 4, numelems, 4)
-messageEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
\ No newline at end of file
diff --git a/julia/Arrow/src/metadata/Schema.jl b/julia/Arrow/src/metadata/Schema.jl
deleted file mode 100644
index 9f83bef..0000000
--- a/julia/Arrow/src/metadata/Schema.jl
+++ /dev/null
@@ -1,610 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FlatBuffers.@scopedenum MetadataVersion::Int16 V1 V2 V3 V4 V5
-
-struct Null <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Null) = ()
-
-nullStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-nullEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Struct <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Struct) = ()
-
-structStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-structEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct List <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::List) = ()
-
-listStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-listEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct LargeList <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::LargeList) = ()
-
-largeListStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-largeListEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct FixedSizeList <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::FixedSizeList) = (:listSize,)
-
-function Base.getproperty(x::FixedSizeList, field::Symbol)
-    if field === :listSize
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-        return Int32(0)
-    end
-    return nothing
-end
-
-fixedSizeListStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-fixedSizeListAddListSize(b::FlatBuffers.Builder, listSize::Int32) = FlatBuffers.prependslot!(b, 0, listSize, 0)
-fixedSizeListEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Map <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Map) = (:keysSorted,)
-
-function Base.getproperty(x::Map, field::Symbol)
-    if field === :keysSorted
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Base.Bool)
-    end
-    return nothing
-end
-
-mapStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-mapAddKeysSorted(b::FlatBuffers.Builder, keyssorted::Base.Bool) = FlatBuffers.prependslot!(b, 0, keyssorted, 0)
-mapEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum UnionMode::Int16 Sparse Dense
-
-struct Union <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Union) = (:mode, :typeIds)
-
-function Base.getproperty(x::Union, field::Symbol)
-    if field === :mode
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), UnionMode)
-        return UnionMode.Sparse
-    elseif field === :typeIds
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return FlatBuffers.Array{Int32}(x, o)
-    end
-    return nothing
-end
-
-unionStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 2)
-unionAddMode(b::FlatBuffers.Builder, mode::UnionMode) = FlatBuffers.prependslot!(b, 0, mode, 0)
-unionAddTypeIds(b::FlatBuffers.Builder, typeIds::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, typeIds, 0)
-unionStartTypeIdsVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 4, numelems, 4)
-unionEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Int <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Int) = (:bitWidth, :is_signed)
-
-function Base.getproperty(x::Int, field::Symbol)
-    if field === :bitWidth
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-    elseif field === :is_signed
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Base.Bool)
-        return false
-    end
-    return nothing
-end
-
-intStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 2)
-intAddBitWidth(b::FlatBuffers.Builder, bitwidth::Int32) = FlatBuffers.prependslot!(b, 0, bitwidth, 0)
-intAddIsSigned(b::FlatBuffers.Builder, issigned::Base.Bool) = FlatBuffers.prependslot!(b, 1, issigned, 0)
-intEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum Precision::Int16 HALF SINGLE DOUBLE
-
-struct FloatingPoint <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::FloatingPoint) = (:precision,)
-
-function Base.getproperty(x::FloatingPoint, field::Symbol)
-    if field === :precision
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Precision)
-        return Precision.HALF
-    end
-    return nothing
-end
-
-floatingPointStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-floatingPointAddPrecision(b::FlatBuffers.Builder, precision::Precision) = FlatBuffers.prependslot!(b, 0, precision, 0)
-floatingPointEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Utf8 <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Utf8) = ()
-
-utf8Start(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-utf8End(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Binary <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Binary) = ()
-
-binaryStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-binaryEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct LargeUtf8 <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::LargeUtf8) = ()
-
-largeUtf8Start(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-largeUtf8End(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct LargeBinary <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::LargeBinary) = ()
-
-largeBinaryStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-largeBinaryEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct FixedSizeBinary <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::FixedSizeBinary) = (:byteWidth,)
-
-function Base.getproperty(x::FixedSizeBinary, field::Symbol)
-    if field === :byteWidth
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-    end
-    return nothing
-end
-
-fixedSizeBinaryStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-fixedSizeBinaryAddByteWidth(b::FlatBuffers.Builder, bytewidth::Int32) = FlatBuffers.prependslot!(b, 0, bytewidth, 0)
-fixedSizeBinaryEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Bool <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Bool) = ()
-
-boolStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 0)
-boolEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Decimal <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Decimal) = (:precision, :scale, :bitWidth)
-
-function Base.getproperty(x::Decimal, field::Symbol)
-    if field === :precision
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-        return Int32(0)
-    elseif field === :scale
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-        return Int32(0)
-    elseif field === :bitWidth
-        o = FlatBuffers.offset(x, 8)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-        return Int32(128)
-    end
-    return nothing
-end
-
-decimalStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 3)
-decimalAddPrecision(b::FlatBuffers.Builder, precision::Int32) = FlatBuffers.prependslot!(b, 0, precision, 0)
-decimalAddScale(b::FlatBuffers.Builder, scale::Int32) = FlatBuffers.prependslot!(b, 1, scale, 0)
-decimalAddBitWidth(b::FlatBuffers.Builder, bitWidth::Int32) = FlatBuffers.prependslot!(b, 2, bitWidth, Int32(128))
-decimalEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum DateUnit::Int16 DAY MILLISECOND
-
-struct Date <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Date) = (:unit,)
-
-function Base.getproperty(x::Date, field::Symbol)
-    if field === :unit
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), DateUnit)
-        return DateUnit.MILLISECOND
-    end
-    return nothing
-end
-
-dateStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-dateAddUnit(b::FlatBuffers.Builder, unit::DateUnit) = FlatBuffers.prependslot!(b, 0, unit, 1)
-dateEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum TimeUnit::Int16 SECOND MILLISECOND MICROSECOND NANOSECOND
-
-struct Time <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Time) = (:unit, :bitWidth)
-
-function Base.getproperty(x::Time, field::Symbol)
-    if field === :unit
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), TimeUnit)
-        return TimeUnit.MILLISECOND
-    elseif field === :bitWidth
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int32)
-        return 32
-    end
-    return nothing
-end
-
-timeStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 2)
-timeAddUnit(b::FlatBuffers.Builder, unit::TimeUnit) = FlatBuffers.prependslot!(b, 0, unit, 1)
-timeAddBitWidth(b::FlatBuffers.Builder, bitwidth::Int32) = FlatBuffers.prependslot!(b, 1, bitwidth, 32)
-timeEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Timestamp <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Timestamp) = (:unit, :timezone)
-
-function Base.getproperty(x::Timestamp, field::Symbol)
-    if field === :unit
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), TimeUnit)
-        return TimeUnit.SECOND
-    elseif field === :timezone
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return String(x, o + FlatBuffers.pos(x))
-    end
-    return nothing
-end
-
-timestampStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 2)
-timestampAddUnit(b::FlatBuffers.Builder, unit::TimeUnit) = FlatBuffers.prependslot!(b, 0, unit, 0)
-timestampAddTimezone(b::FlatBuffers.Builder, timezone::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, timezone, 0)
-timestampEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum IntervalUnit::Int16 YEAR_MONTH DAY_TIME
-
-struct Interval <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Interval) = (:unit,)
-
-function Base.getproperty(x::Interval, field::Symbol)
-    if field === :unit
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), IntervalUnit)
-        return IntervalUnit.YEAR_MONTH
-    end
-    return nothing
-end
-
-intervalStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-intervalAddUnit(b::FlatBuffers.Builder, unit::IntervalUnit) = FlatBuffers.prependslot!(b, 0, unit, 0)
-intervalEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Duration <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Duration) = (:unit,)
-
-function Base.getproperty(x::Duration, field::Symbol)
-    if field === :unit
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), TimeUnit)
-        return TimeUnit.MILLISECOND
-    end
-    return nothing
-end
-
-durationStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 1)
-durationAddUnit(b::FlatBuffers.Builder, unit::TimeUnit) = FlatBuffers.prependslot!(b, 0, unit, 1)
-durationEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-function Type(b::UInt8)
-    b == 1 && return Null
-    b == 2 && return Int
-    b == 3 && return FloatingPoint
-    b == 4 && return Binary
-    b == 5 && return Utf8
-    b == 6 && return Bool
-    b == 7 && return Decimal
-    b == 8 && return Date
-    b == 9 && return Time
-    b == 10 && return Timestamp
-    b == 11 && return Interval
-    b == 12 && return List
-    b == 13 && return Struct
-    b == 14 && return Union
-    b == 15 && return FixedSizeBinary
-    b == 16 && return FixedSizeList
-    b == 17 && return Map
-    b == 18 && return Duration
-    b == 19 && return LargeBinary
-    b == 20 && return LargeUtf8
-    b == 21 && return LargeList
-    return nothing
-end
-
-function Type(::Base.Type{T})::Int16 where {T}
-    T == Null && return 1
-    T == Int && return 2
-    T == FloatingPoint && return 3
-    T == Binary && return 4
-    T == Utf8 && return 5
-    T == Bool && return 6
-    T == Decimal && return 7
-    T == Date && return 8
-    T == Time && return 9
-    T == Timestamp && return 10
-    T == Interval && return 11
-    T == List && return 12
-    T == Struct && return 13
-    T == Union && return 14
-    T == FixedSizeBinary && return 15
-    T == FixedSizeList && return 16
-    T == Map && return 17
-    T == Duration && return 18
-    T == LargeBinary && return 19
-    T == LargeUtf8 && return 20
-    T == LargeList && return 21
-    return 0
-end
-
-struct KeyValue <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::KeyValue) = (:key, :value)
-
-function Base.getproperty(x::KeyValue, field::Symbol)
-    if field === :key
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return String(x, o + FlatBuffers.pos(x))
-    elseif field === :value
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return String(x, o + FlatBuffers.pos(x))
-    end
-    return nothing
-end
-
-keyValueStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 2)
-keyValueAddKey(b::FlatBuffers.Builder, key::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 0, key, 0)
-keyValueAddValue(b::FlatBuffers.Builder, value::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, value, 0)
-keyValueEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum DictionaryKind::Int16 DenseArray
-
-struct DictionaryEncoding <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::DictionaryEncoding) = (:id, :indexType, :isOrdered, :dictionaryKind)
-
-function Base.getproperty(x::DictionaryEncoding, field::Symbol)
-    if field === :id
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Int64)
-        return Int64(0)
-    elseif field === :indexType
-        o = FlatBuffers.offset(x, 6)
-        if o != 0
-            y = FlatBuffers.indirect(x, o + FlatBuffers.pos(x))
-            return FlatBuffers.init(Int, FlatBuffers.bytes(x), y)
-        end
-    elseif field === :isOrdered
-        o = FlatBuffers.offset(x, 8)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Base.Bool)
-        return false
-    elseif field === :dictionaryKind
-        o = FlatBuffers.offset(x, 10)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), DictionaryKind)
-    end
-    return nothing
-end
-
-dictionaryEncodingStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 3)
-dictionaryEncodingAddId(b::FlatBuffers.Builder, id::Int64) = FlatBuffers.prependslot!(b, 0, id, 0)
-dictionaryEncodingAddIndexType(b::FlatBuffers.Builder, indextype::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, indextype, 0)
-dictionaryEncodingAddIsOrdered(b::FlatBuffers.Builder, isordered::Base.Bool) = FlatBuffers.prependslot!(b, 2, isordered, 0) # isOrdered occupies slot 2 (id = 0, indexType = 1)
-dictionaryEncodingEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-struct Field <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Field) = (:name, :nullable, :type, :dictionary, :children, :custom_metadata)
-
-function Base.getproperty(x::Field, field::Symbol)
-    if field === :name
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return String(x, o + FlatBuffers.pos(x))
-    elseif field === :nullable
-        o = FlatBuffers.offset(x, 6)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Base.Bool)
-        return false
-    elseif field === :type
-        o = FlatBuffers.offset(x, 8)
-        if o != 0
-            T = Type(FlatBuffers.get(x, o + FlatBuffers.pos(x), UInt8))
-            o = FlatBuffers.offset(x, 10)
-            pos = FlatBuffers.union(x, o)
-            if o != 0
-                return FlatBuffers.init(T, FlatBuffers.bytes(x), pos)
-            end
-        end
-    elseif field === :dictionary
-        o = FlatBuffers.offset(x, 12)
-        if o != 0
-            y = FlatBuffers.indirect(x, o + FlatBuffers.pos(x))
-            return FlatBuffers.init(DictionaryEncoding, FlatBuffers.bytes(x), y)
-        end
-    elseif field === :children
-        o = FlatBuffers.offset(x, 14)
-        if o != 0
-            return FlatBuffers.Array{Field}(x, o)
-        end
-    elseif field === :custom_metadata
-        o = FlatBuffers.offset(x, 16)
-        if o != 0
-            return FlatBuffers.Array{KeyValue}(x, o)
-        end
-    end
-    return nothing
-end
-
-fieldStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 7)
-fieldAddName(b::FlatBuffers.Builder, name::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 0, name, 0)
-fieldAddNullable(b::FlatBuffers.Builder, nullable::Base.Bool) = FlatBuffers.prependslot!(b, 1, nullable, false)
-fieldAddTypeType(b::FlatBuffers.Builder, ::Core.Type{T}) where {T} = FlatBuffers.prependslot!(b, 2, Type(T), 0)
-fieldAddType(b::FlatBuffers.Builder, type::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 3, type, 0)
-fieldAddDictionary(b::FlatBuffers.Builder, dictionary::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 4, dictionary, 0)
-fieldAddChildren(b::FlatBuffers.Builder, children::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 5, children, 0)
-fieldStartChildrenVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 4, numelems, 4)
-fieldAddCustomMetadata(b::FlatBuffers.Builder, custommetadata::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 6, custommetadata, 0)
-fieldStartCustomMetadataVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 4, numelems, 4)
-fieldEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
-
-FlatBuffers.@scopedenum Endianness::Int16 Little Big
-
-struct Buffer <: FlatBuffers.Struct
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-FlatBuffers.structsizeof(::Base.Type{Buffer}) = 16
-
-Base.propertynames(x::Buffer) = (:offset, :length)
-
-function Base.getproperty(x::Buffer, field::Symbol)
-    if field === :offset
-        return FlatBuffers.get(x, FlatBuffers.pos(x), Int64)
-    elseif field === :length
-        return FlatBuffers.get(x, FlatBuffers.pos(x) + 8, Int64)
-    end
-    return nothing
-end
-
-function createBuffer(b::FlatBuffers.Builder, offset::Int64, length::Int64)
-    FlatBuffers.prep!(b, 8, 16)
-    prepend!(b, length)
-    prepend!(b, offset)
-    return FlatBuffers.offset(b)
-end
-
-struct Schema <: FlatBuffers.Table
-    bytes::Vector{UInt8}
-    pos::Base.Int
-end
-
-Base.propertynames(x::Schema) = (:endianness, :fields, :custom_metadata)
-
-function Base.getproperty(x::Schema, field::Symbol)
-    if field === :endianness
-        o = FlatBuffers.offset(x, 4)
-        o != 0 && return FlatBuffers.get(x, o + FlatBuffers.pos(x), Endianness)
-    elseif field === :fields
-        o = FlatBuffers.offset(x, 6)
-        if o != 0
-            return FlatBuffers.Array{Field}(x, o)
-        end
-    elseif field === :custom_metadata
-        o = FlatBuffers.offset(x, 8)
-        if o != 0
-            return FlatBuffers.Array{KeyValue}(x, o)
-        end
-    end
-    return nothing
-end
-
-schemaStart(b::FlatBuffers.Builder) = FlatBuffers.startobject!(b, 3)
-schemaAddEndianness(b::FlatBuffers.Builder, endianness::Endianness) = FlatBuffers.prependslot!(b, 0, endianness, 0)
-schemaAddFields(b::FlatBuffers.Builder, fields::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 1, fields, 0)
-schemaStartFieldsVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 4, numelems, 4)
-schemaAddCustomMetadata(b::FlatBuffers.Builder, custommetadata::FlatBuffers.UOffsetT) = FlatBuffers.prependoffsetslot!(b, 2, custommetadata, 0)
-schemaStartCustomMetadataVector(b::FlatBuffers.Builder, numelems) = FlatBuffers.startvector!(b, 4, numelems, 4)
-schemaEnd(b::FlatBuffers.Builder) = FlatBuffers.endobject!(b)
diff --git a/julia/Arrow/src/table.jl b/julia/Arrow/src/table.jl
deleted file mode 100644
index 3e3bad8..0000000
--- a/julia/Arrow/src/table.jl
+++ /dev/null
@@ -1,556 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-struct BatchIterator
-    bytes::Vector{UInt8}
-    startpos::Int
-end
-
-"""
-    Arrow.Stream(io::IO; convert::Bool=true)
-    Arrow.Stream(file::String; convert::Bool=true)
-    Arrow.Stream(bytes::Vector{UInt8}, pos=1, len=nothing; convert::Bool=true)
-
-Start reading an arrow-formatted table from:
- * `io`: bytes will be read all at once via `read(io)`
- * `file`: bytes will be read via `Mmap.mmap(file)`
- * `bytes`: a byte vector directly, optionally specifying the starting byte position `pos` and the length `len`
-
-Reads the initial schema message from the arrow stream/file, then returns an `Arrow.Stream` object
-which will iterate over record batch messages, producing an [`Arrow.Table`](@ref) on each iteration.
-
-Because iteration produces [`Arrow.Table`](@ref) values, `Arrow.Stream` satisfies the `Tables.partitions` interface, and as such can
-be passed to Tables.jl-compatible sink functions.
-
-This allows iterating over extremely large "arrow tables" in chunks represented as record batches.
-
-Supports the `convert` keyword argument which controls whether certain arrow primitive types will be
-lazily converted to more friendly Julia defaults; by default, `convert=true`.
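-
-# Examples
-
-A minimal usage sketch; the file name `"data.arrow"` and the `process` helper are
-placeholders for illustration, not part of this API:
-
-    for tbl in Arrow.Stream("data.arrow")
-        # each iteration yields an `Arrow.Table` for one record batch,
-        # so memory use stays bounded by the batch size
-        process(tbl)
-    end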
-"""
-struct Stream
-    batchiterator::BatchIterator
-    pos::Int
-    names::Vector{Symbol}
-    schema::Meta.Schema
-    dictencodings::Dict{Int64, DictEncoding} # dictionary id => DictEncoding
-    dictencoded::Dict{Int64, Meta.Field} # dictionary id => field
-    convert::Bool
-end
-
-Tables.partitions(x::Stream) = x
-
-Stream(io::IO, pos::Integer=1, len=nothing; convert::Bool=true) = Stream(Base.read(io), pos, len; convert=convert)
-Stream(str::String, pos::Integer=1, len=nothing; convert::Bool=true) = isfile(str) ? Stream(Mmap.mmap(str), pos, len; convert=convert) :
-    throw(ArgumentError("$str is not a file"))
-
-# detect whether the bytes are in the arrow file format (leading/trailing magic bytes) or the stream format
-function Stream(bytes::Vector{UInt8}, off::Integer=1, tlen::Union{Integer, Nothing}=nothing; convert::Bool=true)
-    len = something(tlen, length(bytes))
-    if len > 24 &&
-        _startswith(bytes, off, FILE_FORMAT_MAGIC_BYTES) &&
-        _endswith(bytes, off + len - 1, FILE_FORMAT_MAGIC_BYTES)
-        off += 8 # skip past magic bytes + padding
-    end
-    dictencodings = Dict{Int64, DictEncoding}() # dictionary id => DictEncoding
-    dictencoded = Dict{Int64, Meta.Field}() # dictionary id => field
-    batchiterator = BatchIterator(bytes, off)
-    state = iterate(batchiterator)
-    state === nothing && throw(ArgumentError("no arrow ipc messages found in provided input"))
-    batch, (pos, id) = state
-    schema = batch.msg.header
-    schema isa Meta.Schema || throw(ArgumentError("first arrow ipc message MUST be a schema message"))
-    # assert endianness?
-    # store custom_metadata?
-    names = Symbol[]
-    for (i, field) in enumerate(schema.fields)
-        push!(names, Symbol(field.name))
-        # recursively find any dictionaries for any fields
-        getdictionaries!(dictencoded, field)
-        @debug 1 "parsed column from schema: field = $field"
-    end
-    return Stream(batchiterator, pos, names, schema, dictencodings, dictencoded, convert)
-end
-
-function Base.iterate(x::Stream, (pos, id)=(x.pos, 1))
-    columns = AbstractVector[]
-    while true
-        state = iterate(x.batchiterator, (pos, id))
-        state === nothing && return nothing
-        batch, (pos, id) = state
-        header = batch.msg.header
-        if header isa Meta.DictionaryBatch
-            id = header.id
-            recordbatch = header.data
-            @debug 1 "parsing dictionary batch message: id = $id, compression = $(recordbatch.compression)"
-            if haskey(x.dictencodings, id) && header.isDelta
-                # delta
-                field = x.dictencoded[id]
-                values, _, _ = build(field, field.type, batch, recordbatch, x.dictencodings, Int64(1), Int64(1), x.convert)
-                dictencoding = x.dictencodings[id]
-                append!(dictencoding.data, values)
-                continue
-            end
-            # new dictencoding or replace
-            field = x.dictencoded[id]
-            values, _, _ = build(field, field.type, batch, recordbatch, x.dictencodings, Int64(1), Int64(1), x.convert)
-            A = ChainedVector([values])
-            x.dictencodings[id] = DictEncoding{eltype(A), typeof(A)}(id, A, field.dictionary.isOrdered, values.metadata)
-            @debug 1 "parsed dictionary batch message: id=$id, data=$values\n"
-        elseif header isa Meta.RecordBatch
-            @debug 1 "parsing record batch message: compression = $(header.compression)"
-            for vec in VectorIterator(x.schema, batch, x.dictencodings, x.convert)
-                push!(columns, vec)
-            end
-            break
-        else
-            throw(ArgumentError("unsupported arrow message type: $(typeof(header))"))
-        end
-    end
-    lookup = Dict{Symbol, AbstractVector}()
-    types = Type[]
-    for (nm, col) in zip(x.names, columns)
-        lookup[nm] = col
-        push!(types, eltype(col))
-    end
-    return Table(x.names, types, columns, lookup, Ref(x.schema)), (pos, id)
-end
-
-"""
-    Arrow.Table(io::IO; convert::Bool=true)
-    Arrow.Table(file::String; convert::Bool=true)
-    Arrow.Table(bytes::Vector{UInt8}, pos=1, len=nothing; convert::Bool=true)
-
-Read an arrow-formatted table from:
- * `io`: bytes will be read all at once via `read(io)`
- * `file`: bytes will be read via `Mmap.mmap(file)`
- * `bytes`: a byte vector directly, optionally specifying the starting byte position `pos` and the length `len`
-
-Returns an `Arrow.Table` object that allows column access via `table.col1`, `table[:col1]`, or `table[1]`.
-
-NOTE: the columns in an `Arrow.Table` are views into the original arrow memory, and hence are not easily
-modifiable (e.g. via `push!`, `append!`, etc.). To mutate arrow columns, call `copy(x)` to materialize
-the arrow data as a normal Julia array.
-
-`Arrow.Table` also satisfies the [Tables.jl](https://github.com/JuliaData/Tables.jl) interface, and so can easily be materialized via any supporting
-sink function: e.g. `DataFrame(Arrow.Table(file))`, `SQLite.load!(db, "table", Arrow.Table(file))`, etc.
-
-Supports the `convert` keyword argument which controls whether certain arrow primitive types will be
-lazily converted to more friendly Julia defaults; by default, `convert=true`.
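-
-# Examples
-
-A minimal sketch of column access; the file name and column name are placeholders
-for illustration:
-
-    tbl = Arrow.Table("data.arrow")
-    tbl.col1            # column access via property
-    tbl[:col1]          # ... or by Symbol
-    v = copy(tbl.col1)  # materialize a mutable Julia array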
-"""
-struct Table <: Tables.AbstractColumns
-    names::Vector{Symbol}
-    types::Vector{Type}
-    columns::Vector{AbstractVector}
-    lookup::Dict{Symbol, AbstractVector}
-    schema::Ref{Meta.Schema}
-end
-
-Table() = Table(Symbol[], Type[], AbstractVector[], Dict{Symbol, AbstractVector}(), Ref{Meta.Schema}())
-
-names(t::Table) = getfield(t, :names)
-types(t::Table) = getfield(t, :types)
-columns(t::Table) = getfield(t, :columns)
-lookup(t::Table) = getfield(t, :lookup)
-schema(t::Table) = getfield(t, :schema)
-
-Tables.istable(::Table) = true
-Tables.columnaccess(::Table) = true
-Tables.columns(t::Table) = Tables.CopiedColumns(t)
-Tables.schema(t::Table) = Tables.Schema(names(t), types(t))
-Tables.columnnames(t::Table) = names(t)
-Tables.getcolumn(t::Table, i::Int) = columns(t)[i]
-Tables.getcolumn(t::Table, nm::Symbol) = lookup(t)[nm]
-
-# high-level user API functions
-Table(io::IO, pos::Integer=1, len=nothing; convert::Bool=true) = Table(Base.read(io), pos, len; convert=convert)
-Table(str::String, pos::Integer=1, len=nothing; convert::Bool=true) = isfile(str) ? Table(Mmap.mmap(str), pos, len; convert=convert) :
-    throw(ArgumentError("$str is not a file"))
-
-# detect whether the bytes are in the arrow file format (leading/trailing magic bytes) or the stream format
-function Table(bytes::Vector{UInt8}, off::Integer=1, tlen::Union{Integer, Nothing}=nothing; convert::Bool=true)
-    len = something(tlen, length(bytes))
-    if len > 24 &&
-        _startswith(bytes, off, FILE_FORMAT_MAGIC_BYTES) &&
-        _endswith(bytes, off + len - 1, FILE_FORMAT_MAGIC_BYTES)
-        off += 8 # skip past magic bytes + padding
-    end
-    t = Table()
-    sch = nothing
-    dictencodings = Dict{Int64, DictEncoding}() # dictionary id => DictEncoding
-    dictencoded = Dict{Int64, Meta.Field}() # dictionary id => field
-    tsks = Channel{Task}(Inf)
-    tsk = Threads.@spawn begin
-        i = 1
-        for tsk in tsks
-            cols = fetch(tsk)
-            if i == 1
-                foreach(x -> push!(columns(t), x), cols)
-            elseif i == 2
-                foreach(1:length(cols)) do i
-                    columns(t)[i] = ChainedVector([columns(t)[i], cols[i]])
-                end
-            else
-                foreach(1:length(cols)) do i
-                    append!(columns(t)[i], cols[i])
-                end
-            end
-            i += 1
-        end
-    end
-    for batch in BatchIterator(bytes, off)
-        # store custom_metadata of batch.msg?
-        header = batch.msg.header
-        if header isa Meta.Schema
-            @debug 1 "parsing schema message"
-            # assert endianness?
-            # store custom_metadata?
-            for (i, field) in enumerate(header.fields)
-                push!(names(t), Symbol(field.name))
-                # recursively find any dictionaries for any fields
-                getdictionaries!(dictencoded, field)
-                @debug 1 "parsed column from schema: field = $field"
-            end
-            sch = header
-            schema(t)[] = sch
-        elseif header isa Meta.DictionaryBatch
-            id = header.id
-            recordbatch = header.data
-            @debug 1 "parsing dictionary batch message: id = $id, compression = $(recordbatch.compression)"
-            if haskey(dictencodings, id) && header.isDelta
-                # delta
-                field = dictencoded[id]
-                values, _, _ = build(field, field.type, batch, recordbatch, dictencodings, Int64(1), Int64(1), convert)
-                dictencoding = dictencodings[id]
-                append!(dictencoding.data, values)
-                continue
-            end
-            # new dictencoding or replace
-            field = dictencoded[id]
-            values, _, _ = build(field, field.type, batch, recordbatch, dictencodings, Int64(1), Int64(1), convert)
-            A = ChainedVector([values])
-            dictencodings[id] = DictEncoding{eltype(A), typeof(A)}(id, A, field.dictionary.isOrdered, values.metadata)
-            @debug 1 "parsed dictionary batch message: id=$id, data=$values\n"
-        elseif header isa Meta.RecordBatch
-            @debug 1 "parsing record batch message: compression = $(header.compression)"
-            put!(tsks, Threads.@spawn begin
-                collect(VectorIterator(sch, batch, dictencodings, convert))
-            end)
-        else
-            throw(ArgumentError("unsupported arrow message type: $(typeof(header))"))
-        end
-    end
-    close(tsks)
-    wait(tsk)
-    lu = lookup(t)
-    ty = types(t)
-    for (nm, col) in zip(names(t), columns(t))
-        lu[nm] = col
-        push!(ty, eltype(col))
-    end
-    meta = sch !== nothing ? sch.custom_metadata : nothing
-    if meta !== nothing
-        setmetadata!(t, Dict(String(kv.key) => String(kv.value) for kv in meta))
-    end
-    return t
-end
-
-function getdictionaries!(dictencoded, field)
-    d = field.dictionary
-    if d !== nothing
-        dictencoded[d.id] = field
-    end
-    for child in field.children
-        getdictionaries!(dictencoded, child)
-    end
-    return
-end
-
-struct Batch
-    msg::Meta.Message
-    bytes::Vector{UInt8}
-    pos::Int
-    id::Int
-end
-
-function Base.iterate(x::BatchIterator, (pos, id)=(x.startpos, 0))
-    @debug 1 "checking for next arrow message: pos = $pos"
-    if pos + 3 > length(x.bytes)
-        @debug 1 "not enough bytes left for another batch message"
-        return nothing
-    end
-    if readbuffer(x.bytes, pos, UInt32) != CONTINUATION_INDICATOR_BYTES
-        @debug 1 "didn't find continuation byte to keep parsing messages: $(readbuffer(x.bytes, pos, UInt32))"
-        return nothing
-    end
-    pos += 4
-    if pos + 3 > length(x.bytes)
-        @debug 1 "not enough bytes left to read length of another batch message"
-        return nothing
-    end
-    msglen = readbuffer(x.bytes, pos, Int32)
-    if msglen == 0
-        @debug 1 "message has 0 length; terminating message parsing"
-        return nothing
-    end
-    pos += 4
-    msg = FlatBuffers.getrootas(Meta.Message, x.bytes, pos-1)
-    pos += msglen
-    # pos now points to message body
-    @debug 1 "parsing message: pos = $pos, msglen = $msglen, bodyLength = $(msg.bodyLength)"
-    return Batch(msg, x.bytes, pos, id), (pos + msg.bodyLength, id + 1)
-end
-
-struct VectorIterator
-    schema::Meta.Schema
-    batch::Batch # batch.msg.header MUST BE RecordBatch
-    dictencodings::Dict{Int64, DictEncoding}
-    convert::Bool
-end
-
-buildmetadata(f::Meta.Field) = buildmetadata(f.custom_metadata)
-buildmetadata(meta) = Dict(String(kv.key) => String(kv.value) for kv in meta)
-buildmetadata(::Nothing) = nothing
-
-function Base.iterate(x::VectorIterator, (columnidx, nodeidx, bufferidx)=(Int64(1), Int64(1), Int64(1)))
-    columnidx > length(x.schema.fields) && return nothing
-    field = x.schema.fields[columnidx]
-    @debug 2 "building top-level column: field = $(field), columnidx = $columnidx, nodeidx = $nodeidx, bufferidx = $bufferidx"
-    A, nodeidx, bufferidx = build(field, x.batch, x.batch.msg.header, x.dictencodings, nodeidx, bufferidx, x.convert)
-    @debug 2 "built top-level column: A = $(typeof(A)), columnidx = $columnidx, nodeidx = $nodeidx, bufferidx = $bufferidx"
-    @debug 3 A
-    return A, (columnidx + 1, nodeidx, bufferidx)
-end
-
-Base.length(x::VectorIterator) = length(x.schema.fields)
-
-const ListTypes = Union{Meta.Utf8, Meta.LargeUtf8, Meta.Binary, Meta.LargeBinary, Meta.List, Meta.LargeList}
-const LargeLists = Union{Meta.LargeUtf8, Meta.LargeBinary, Meta.LargeList}
-
-function build(field::Meta.Field, batch, rb, de, nodeidx, bufferidx, convert)
-    d = field.dictionary
-    if d !== nothing
-        validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-        bufferidx += 1
-        buffer = rb.buffers[bufferidx]
-        S = d.indexType === nothing ? Int32 : juliaeltype(field, d.indexType, false)
-        bytes, indices = reinterp(S, batch, buffer, rb.compression)
-        encoding = de[d.id]
-        A = DictEncoded(bytes, validity, indices, encoding, buildmetadata(field.custom_metadata))
-        nodeidx += 1
-        bufferidx += 1
-    else
-        A, nodeidx, bufferidx = build(field, field.type, batch, rb, de, nodeidx, bufferidx, convert)
-    end
-    return A, nodeidx, bufferidx
-end
-
-function buildbitmap(batch, rb, nodeidx, bufferidx)
-    buffer = rb.buffers[bufferidx]
-    voff = batch.pos + buffer.offset
-    node = rb.nodes[nodeidx]
-    if rb.compression === nothing
-        return ValidityBitmap(batch.bytes, voff, node.length, node.null_count)
-    else
-        # compressed
-        ptr = pointer(batch.bytes, voff)
-        _, decodedbytes = uncompress(ptr, buffer, rb.compression)
-        return ValidityBitmap(decodedbytes, 1, node.length, node.null_count)
-    end
-end
-
-function uncompress(ptr::Ptr{UInt8}, buffer, compression)
-    if buffer.length == 0
-        return 0, UInt8[]
-    end
-    len = unsafe_load(convert(Ptr{Int64}, ptr))
-    ptr += 8 # skip past uncompressed length as Int64
-    encodedbytes = unsafe_wrap(Array, ptr, buffer.length - 8)
-    if compression.codec === Meta.CompressionType.LZ4_FRAME
-        decodedbytes = transcode(LZ4FrameDecompressor, encodedbytes)
-    elseif compression.codec === Meta.CompressionType.ZSTD
-        decodedbytes = transcode(ZstdDecompressor, encodedbytes)
-    else
-        error("unsupported compression type when reading arrow buffers: $(typeof(compression.codec))")
-    end
-    return len, decodedbytes
-end
-
-function reinterp(::Type{T}, batch, buf, compression) where {T}
-    ptr = pointer(batch.bytes, batch.pos + buf.offset)
-    if compression === nothing
-        return batch.bytes, unsafe_wrap(Array, convert(Ptr{T}, ptr), div(buf.length, sizeof(T)))
-    else
-        # compressed
-        len, decodedbytes = uncompress(ptr, buf, compression)
-        return decodedbytes, unsafe_wrap(Array, convert(Ptr{T}, pointer(decodedbytes)), div(len, sizeof(T)))
-    end
-end
-
-function build(f::Meta.Field, L::ListTypes, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-    bufferidx += 1
-    buffer = rb.buffers[bufferidx]
-    ooff = batch.pos + buffer.offset
-    OT = L isa LargeLists ? Int64 : Int32
-    bytes, offs = reinterp(OT, batch, buffer, rb.compression)
-    offsets = Offsets(bytes, offs)
-    bufferidx += 1
-    len = rb.nodes[nodeidx].length
-    nodeidx += 1
-    if L isa Meta.Utf8 || L isa Meta.LargeUtf8 || L isa Meta.Binary || L isa Meta.LargeBinary
-        buffer = rb.buffers[bufferidx]
-        bytes, A = reinterp(UInt8, batch, buffer, rb.compression)
-        bufferidx += 1
-    else
-        bytes = UInt8[]
-        A, nodeidx, bufferidx = build(f.children[1], batch, rb, de, nodeidx, bufferidx, convert)
-    end
-    meta = buildmetadata(f.custom_metadata)
-    T = juliaeltype(f, meta, convert)
-    return List{T, OT, typeof(A)}(bytes, validity, offsets, A, len, meta), nodeidx, bufferidx
-end
-
-function build(f::Meta.Field, L::Union{Meta.FixedSizeBinary, Meta.FixedSizeList}, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-    bufferidx += 1
-    len = rb.nodes[nodeidx].length
-    nodeidx += 1
-    if L isa Meta.FixedSizeBinary
-        buffer = rb.buffers[bufferidx]
-        bytes, A = reinterp(UInt8, batch, buffer, rb.compression)
-        bufferidx += 1
-    else
-        bytes = UInt8[]
-        A, nodeidx, bufferidx = build(f.children[1], batch, rb, de, nodeidx, bufferidx, convert)
-    end
-    meta = buildmetadata(f.custom_metadata)
-    T = juliaeltype(f, meta, convert)
-    return FixedSizeList{T, typeof(A)}(bytes, validity, A, len, meta), nodeidx, bufferidx
-end
-
-function build(f::Meta.Field, L::Meta.Map, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-    bufferidx += 1
-    buffer = rb.buffers[bufferidx]
-    ooff = batch.pos + buffer.offset
-    OT = Int32
-    bytes, offs = reinterp(OT, batch, buffer, rb.compression)
-    offsets = Offsets(bytes, offs)
-    bufferidx += 1
-    len = rb.nodes[nodeidx].length
-    nodeidx += 1
-    A, nodeidx, bufferidx = build(f.children[1], batch, rb, de, nodeidx, bufferidx, convert)
-    meta = buildmetadata(f.custom_metadata)
-    T = juliaeltype(f, meta, convert)
-    return Map{T, OT, typeof(A)}(validity, offsets, A, len, meta), nodeidx, bufferidx
-end
-
-function build(f::Meta.Field, L::Meta.Struct, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-    bufferidx += 1
-    len = rb.nodes[nodeidx].length
-    vecs = []
-    nodeidx += 1
-    for child in f.children
-        A, nodeidx, bufferidx = build(child, batch, rb, de, nodeidx, bufferidx, convert)
-        push!(vecs, A)
-    end
-    data = Tuple(vecs)
-    meta = buildmetadata(f.custom_metadata)
-    T = juliaeltype(f, meta, convert)
-    return Struct{T, typeof(data)}(validity, data, len, meta), nodeidx, bufferidx
-end
-
-function build(f::Meta.Field, L::Meta.Union, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    buffer = rb.buffers[bufferidx]
-    bytes, typeIds = reinterp(UInt8, batch, buffer, rb.compression)
-    bufferidx += 1
-    if L.mode == Meta.UnionMode.Dense
-        buffer = rb.buffers[bufferidx]
-        bytes2, offsets = reinterp(Int32, batch, buffer, rb.compression)
-        bufferidx += 1
-    end
-    vecs = []
-    nodeidx += 1
-    for child in f.children
-        A, nodeidx, bufferidx = build(child, batch, rb, de, nodeidx, bufferidx, convert)
-        push!(vecs, A)
-    end
-    data = Tuple(vecs)
-    meta = buildmetadata(f.custom_metadata)
-    T = juliaeltype(f, meta, convert)
-    if L.mode == Meta.UnionMode.Dense
-        B = DenseUnion{T, typeof(data)}(bytes, bytes2, typeIds, offsets, data, meta)
-    else
-        B = SparseUnion{T, typeof(data)}(bytes, typeIds, data, meta)
-    end
-    return B, nodeidx, bufferidx
-end
-
-function build(f::Meta.Field, L::Meta.Null, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    return MissingVector(rb.nodes[nodeidx].length), nodeidx + 1, bufferidx
-end
-
-# primitives
-function build(f::Meta.Field, ::L, batch, rb, de, nodeidx, bufferidx, convert) where {L}
-    @debug 2 "building array: L = $L"
-    validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-    bufferidx += 1
-    buffer = rb.buffers[bufferidx]
-    meta = buildmetadata(f.custom_metadata)
-    # get storage type (non-converted)
-    T = juliaeltype(f, nothing, false)
-    @debug 2 "storage type for primitive: T = $T"
-    bytes, A = reinterp(Base.nonmissingtype(T), batch, buffer, rb.compression)
-    len = rb.nodes[nodeidx].length
-    T = juliaeltype(f, meta, convert)
-    @debug 2 "final julia type for primitive: T = $T"
-    return Primitive(T, bytes, validity, A, len, meta), nodeidx + 1, bufferidx + 1
-end
-
-function build(f::Meta.Field, L::Meta.Bool, batch, rb, de, nodeidx, bufferidx, convert)
-    @debug 2 "building array: L = $L"
-    validity = buildbitmap(batch, rb, nodeidx, bufferidx)
-    bufferidx += 1
-    buffer = rb.buffers[bufferidx]
-    meta = buildmetadata(f.custom_metadata)
-    # get storage type (non-converted)
-    T = juliaeltype(f, nothing, false)
-    @debug 2 "storage type for primitive: T = $T"
-    buffer = rb.buffers[bufferidx]
-    voff = batch.pos + buffer.offset
-    node = rb.nodes[nodeidx]
-    if rb.compression === nothing
-        decodedbytes = batch.bytes
-        pos = voff
-        # return ValidityBitmap(batch.bytes, voff, node.length, node.null_count)
-    else
-        # compressed
-        ptr = pointer(batch.bytes, voff)
-        _, decodedbytes = uncompress(ptr, buffer, rb.compression)
-        pos = 1
-        # return ValidityBitmap(decodedbytes, 1, node.length, node.null_count)
-    end
-    len = rb.nodes[nodeidx].length
-    T = juliaeltype(f, meta, convert)
-    return BoolVector{T}(decodedbytes, pos, validity, len, meta), nodeidx + 1, bufferidx + 1
-end
diff --git a/julia/Arrow/src/utils.jl b/julia/Arrow/src/utils.jl
deleted file mode 100644
index 4f741c9..0000000
--- a/julia/Arrow/src/utils.jl
+++ /dev/null
@@ -1,200 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Determines the total number of bytes needed to store `n` bytes with padding.
-# Note that the Arrow standard requires buffers to be aligned to 8-byte boundaries.
-padding(n::Integer, alignment) = ((n + alignment - 1) ÷ alignment) * alignment
-
-paddinglength(n::Integer, alignment) = padding(n, alignment) - n
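-
-# For example, with the default 8-byte alignment: padding(5, 8) == 8 and
-# paddinglength(5, 8) == 3 (three zero bytes of padding), while
-# padding(16, 8) == 16, since 16 is already aligned.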
-
-function writezeros(io::IO, n::Integer)
-    s = 0
-    for i ∈ 1:n
-        s += Base.write(io, 0x00)
-    end
-    s
-end
-
-# efficient writing of arrays
-writearray(io, col) = writearray(io, maybemissing(eltype(col)), col)
-
-function writearray(io::IO, ::Type{T}, col) where {T}
-    if col isa Vector{T}
-        n = Base.write(io, col)
-    elseif isbitstype(T) && (col isa Vector{Union{T, Missing}} || col isa SentinelVector{T, T, Missing, Vector{T}})
-        # need to write the non-selector bytes of isbits Union Arrays
-        n = Base.unsafe_write(io, pointer(col), sizeof(T) * length(col))
-    elseif col isa ChainedVector
-        n = 0
-        for A in col.arrays
-            n += writearray(io, T, A)
-        end
-    else
-        n = 0
-        data = Vector{UInt8}(undef, sizeof(col))
-        buf = IOBuffer(data; write=true)
-        for x in col
-            n += Base.write(buf, coalesce(x, ArrowTypes.default(T)))
-        end
-        n = Base.write(io, take!(buf))
-    end
-    return n
-end
-
-getbit(v::UInt8, n::Integer) = Bool((v & 0x02^(n - 1)) >> (n - 1))
-
-function setbit(v::UInt8, b::Bool, n::Integer)
-    if b
-        v | 0x02^(n - 1)
-    else
-        v & (0xff ⊻ 0x02^(n - 1))
-    end
-end
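-
-# For example, getbit(0x04, 3) == true, and setbit(0x00, true, 3) == 0x04
-# (bits are numbered starting from 1 at the least significant position).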
-
-# Determines the number of bytes used by `n` bits, optionally with padding.
-function bitpackedbytes(n::Integer, alignment)
-    ℓ = cld(n, 8)
-    return ℓ + paddinglength(ℓ, alignment)
-end
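-
-# For example, bitpackedbytes(10, 8) == 8: 10 bits pack into cld(10, 8) == 2
-# bytes, plus 6 bytes of padding to reach the 8-byte alignment boundary.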
-
-# count # of missing elements in an iterable
-nullcount(col) = count(ismissing, col)
-
-# like startswith/endswith for strings, but on byte buffers
-function _startswith(a::AbstractVector{UInt8}, pos::Integer, b::AbstractVector{UInt8})
-    for i = 1:length(b)
-        @inbounds check = a[pos + i - 1] == b[i]
-        check || return false
-    end
-    return true
-end
-
-function _endswith(a::AbstractVector{UInt8}, endpos::Integer, b::AbstractVector{UInt8})
-    aoff = endpos - length(b) + 1
-    for i = 1:length(b)
-        @inbounds check = a[aoff] == b[i]
-        check || return false
-        aoff += 1
-    end
-    return true
-end
-
-# read a single element from a byte vector
-# copied from read(::IOBuffer, T) in Base
-function readbuffer(t::AbstractVector{UInt8}, pos::Integer, ::Type{T}) where {T}
-    GC.@preserve t begin
-        ptr::Ptr{T} = pointer(t, pos)
-        x = unsafe_load(ptr)
-    end
-end
-
-# given a number of unique values, determine which dict encoding _index_ type is most appropriate
-encodingtype(n) = n < div(typemax(Int8), 2) ? Int8 : n < div(typemax(Int16), 2) ? Int16 : n < div(typemax(Int32), 2) ? Int32 : Int64
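-
-# For example, encodingtype(50) == Int8, encodingtype(1_000) == Int16, and
-# encodingtype(100_000) == Int32.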
-
-# lazily call convert(T, x) on getindex for each x in data
-struct Converter{T, A} <: AbstractVector{T}
-    data::A
-end
-
-converter(::Type{T}, x::A) where {T, A} = Converter{eltype(A) >: Missing ? Union{T, Missing} : T, A}(x)
-converter(::Type{T}, x::ChainedVector{A}) where {T, A} = ChainedVector([converter(T, x) for x in x.arrays])
-
-Base.IndexStyle(::Type{<:Converter}) = Base.IndexLinear()
-Base.size(x::Converter) = (length(x.data),)
-Base.eltype(x::Converter{T, A}) where {T, A} = T
-Base.getindex(x::Converter{T}, i::Int) where {T} = ArrowTypes.arrowconvert(T, getindex(x.data, i))
-
-maybemissing(::Type{T}) where {T} = T === Missing ? Missing : Base.nonmissingtype(T)
-
-function getfooter(filebytes)
-    len = readbuffer(filebytes, length(filebytes) - 9, Int32)
-    FlatBuffers.getrootas(Meta.Footer, filebytes[end-(9 + len):end-10], 0)
-end
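-
-# An arrow file ends with the footer flatbuffer, an Int32 footer length, and
-# the 6-byte "ARROW1" magic, so the footer length sits 9 bytes from the end
-# and the footer flatbuffer immediately precedes it.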
-
-function getrb(filebytes)
-    f = getfooter(filebytes)
-    rb = f.recordBatches[1]
-    return filebytes[rb.offset+1:(rb.offset+1+rb.metaDataLength)]
-    # FlatBuffers.getrootas(Meta.Message, filebytes, rb.offset)
-end
-
-function readmessage(filebytes, off=9)
-    @assert readbuffer(filebytes, off, UInt32) === 0xFFFFFFFF
-    len = readbuffer(filebytes, off + 4, Int32)
-
-    FlatBuffers.getrootas(Meta.Message, filebytes, off + 8)
-end
-
-# a custom Channel type that only allows put!-ing objects in a specific, monotonically increasing order
-struct OrderedChannel{T}
-    chan::Channel{T}
-    cond::Threads.Condition
-    i::Ref{Int}
-end
-
-OrderedChannel{T}(sz) where {T} = OrderedChannel{T}(Channel{T}(sz), Threads.Condition(), Ref(1))
-Base.iterate(ch::OrderedChannel, st...) = iterate(ch.chan, st...)
-
-macro lock(obj, expr)
-    esc(quote
-        lock($obj)
-        try
-            $expr
-        finally
-            unlock($obj)
-        end
-    end)
-end
-
-# when put!-ing an object, the operation may have to wait until other tasks have put their
-# objects to ensure the channel is ordered correctly
-function Base.put!(ch::OrderedChannel{T}, x::T, i::Integer, incr::Bool=false) where {T}
-    @lock ch.cond begin
-        while ch.i[] < i
-            # channel index too early, need to wait for other tasks to put their objects first
-            wait(ch.cond)
-        end
-        # now it's our turn
-        put!(ch.chan, x)
-        if incr
-            ch.i[] += 1
-        end
-        # wake up tasks that may be waiting to put their objects
-        notify(ch.cond)
-    end
-    return
-end
-
-function Base.close(ch::OrderedChannel)
-    @lock ch.cond begin
-        # just need to ensure any tasks waiting to put their objects have had a chance to put
-        while Base.n_waiters(ch.cond) > 0
-            wait(ch.cond)
-        end
-        close(ch.chan)
-    end
-    return
-end
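-
-# A minimal usage sketch (hypothetical values): three tasks put results out of
-# order, but consumers always observe them in index order 1, 2, 3:
-#
-#   ch = OrderedChannel{Int}(Inf)
-#   @sync for i in (3, 1, 2)
-#       Threads.@spawn put!(ch, i, i, true)
-#   end
-#   close(ch)
-#   for x in ch
-#       println(x) # prints 1, 2, 3
-#   end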
-
-struct Lockable{T}
-    x::T
-    lock::ReentrantLock
-end
-
-Lockable(x::T) where {T} = Lockable{T}(x, ReentrantLock())
-
-Base.lock(x::Lockable) = lock(x.lock)
-Base.unlock(x::Lockable) = unlock(x.lock)
diff --git a/julia/Arrow/src/write.jl b/julia/Arrow/src/write.jl
deleted file mode 100644
index 85188a0..0000000
--- a/julia/Arrow/src/write.jl
+++ /dev/null
@@ -1,456 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-const OBJ_METADATA = IdDict{Any, Dict{String, String}}()
-
-"""
-    Arrow.setmetadata!(x, metadata::Dict{String, String})
-
-Set the metadata for any object, provided as a `Dict{String, String}`.
-Metadata attached to a table or column will be serialized when written
-as a stream or file.
-"""
-function setmetadata!(x, meta::Dict{String, String})
-    OBJ_METADATA[x] = meta
-    return
-end
-
-"""
-    Arrow.getmetadata(x) => Dict{String, String}
-
-Retrieve any metadata (as a `Dict{String, String}`) attached to an object.
-
-Metadata may be attached to any object via [`Arrow.setmetadata!`](@ref),
-or deserialized via the arrow format directly (the format allows attaching metadata
-to table, column, and other objects).
-"""
-getmetadata(x, default=nothing) = get(OBJ_METADATA, x, default)
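-
-# A short usage sketch (hypothetical table and keys): attach metadata to a
-# table, then read it back:
-#
-#   tbl = (a = [1, 2, 3], b = ["x", "y", "z"])
-#   Arrow.setmetadata!(tbl, Dict("source" => "sensor-7"))
-#   Arrow.getmetadata(tbl) # == Dict("source" => "sensor-7")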
-
-"""
-    Arrow.write(io::IO, tbl)
-    Arrow.write(file::String, tbl)
-    tbl |> Arrow.write(io_or_file)
-
-Write any [Tables.jl](https://github.com/JuliaData/Tables.jl)-compatible `tbl` out as arrow formatted data.
-Providing an `io::IO` argument will cause the data to be written to it
-in the ["streaming" format](https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format), unless `file=true` keyword argument is passed.
-Providing a `file::String` argument will result in the ["file" format](https://arrow.apache.org/docs/format/Columnar.html#ipc-file-format) being written.
-
-Multiple record batches will be written based on the number of
-`Tables.partitions(tbl)` that are provided; by default, this is just
-one for a given table, but some table sources support automatic
-partitioning. You can turn multiple table objects into partitions
-by doing `Tables.partitioner([tbl1, tbl2, ...])`, but note that
-each table must have the exact same `Tables.Schema`.
-
-By default, `Arrow.write` will use multiple threads to write multiple
-record batches simultaneously (e.g. if julia is started with `julia -t 8` or the `JULIA_NUM_THREADS` environment variable is set).
-
-Supported keyword arguments to `Arrow.write` include:
-  * `compress`: possible values include `:lz4`, `:zstd`, or your own initialized `LZ4FrameCompressor` or `ZstdCompressor` objects; will cause all buffers in each record batch to use the respective compression encoding
-  * `alignment::Int=8`: specify the number of bytes to align buffers to when written in messages; it is strongly recommended to use only alignment values of 8 or 64, matching modern CPU cache-line sizes
-  * `dictencode::Bool=false`: whether all columns should use dictionary encoding when being written; to dict encode specific columns, wrap the column/array in `Arrow.DictEncode(col)`
-  * `dictencodenested::Bool=false`: whether nested data type columns should also dict encode nested arrays/buffers; other language implementations [may not support this](https://arrow.apache.org/docs/status.html)
-  * `denseunions::Bool=true`: whether Julia `Vector{<:Union}` arrays should be written using the dense union layout; passing `false` will result in the sparse union layout
-  * `largelists::Bool=false`: causes list column types to be written with Int64 offset arrays; mainly for testing purposes; by default, Int64 offsets will be used only if needed
-  * `file::Bool=false`: if an `io` argument is being written to, passing `file=true` will cause the arrow file format to be written instead of just IPC streaming
-"""
-function write end
-
-write(io_or_file; kw...) = x -> write(io_or_file, x; kw...)
-
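-# A brief usage sketch (hypothetical table and file names):
-#
-#   tbl = (a = [1, 2, 3], b = ["x", "y", "z"])
-#   Arrow.write("data.arrow", tbl)           # file format
-#   io = IOBuffer()
-#   Arrow.write(io, tbl; compress=:zstd)     # streaming format, zstd-compressed
-#   tbl |> Arrow.write("data2.arrow")        # curried form
-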
-function write(file::String, tbl; largelists::Bool=false, compress::Union{Nothing, Symbol, LZ4FrameCompressor, ZstdCompressor}=nothing, denseunions::Bool=true, dictencode::Bool=false, dictencodenested::Bool=false, alignment::Int=8)
-    open(file, "w") do io
-        write(io, tbl, true, largelists, compress, denseunions, dictencode, dictencodenested, alignment)
-    end
-    return file
-end
-
-function write(io::IO, tbl; largelists::Bool=false, compress::Union{Nothing, Symbol, LZ4FrameCompressor, ZstdCompressor}=nothing, denseunions::Bool=true, dictencode::Bool=false, dictencodenested::Bool=false, alignment::Int=8, file::Bool=false)
-    return write(io, tbl, file, largelists, compress, denseunions, dictencode, dictencodenested, alignment)
-end
-
-function write(io, source, writetofile, largelists, compress, denseunions, dictencode, dictencodenested, alignment)
-    if compress === :lz4
-        compress = LZ4_FRAME_COMPRESSOR[]
-    elseif compress === :zstd
-        compress = ZSTD_COMPRESSOR[]
-    elseif compress isa Symbol
-        throw(ArgumentError("unsupported compress keyword argument value: $compress. Valid values include `:lz4` or `:zstd`"))
-    end
-    if writetofile
-        @debug 1 "starting write of arrow formatted file"
-        Base.write(io, "ARROW1\0\0")
-    end
-    msgs = OrderedChannel{Message}(Inf)
-    # build messages
-    sch = Ref{Tables.Schema}()
-    firstcols = Ref{Any}()
-    dictencodings = Dict{Int64, Any}() # Lockable{DictEncoding}
-    blocks = (Block[], Block[])
-    # start message writing from channel
-    tsk = Threads.@spawn for msg in msgs
-        Base.write(io, msg, blocks, sch, alignment)
-    end
-    @sync for (i, tbl) in enumerate(Tables.partitions(source))
-        @debug 1 "processing table partition i = $i"
-        if i == 1
-            cols = toarrowtable(tbl, dictencodings, largelists, compress, denseunions, dictencode, dictencodenested)
-            sch[] = Tables.schema(cols)
-            firstcols[] = cols
-            put!(msgs, makeschemamsg(sch[], cols), i)
-            if !isempty(dictencodings)
-                des = sort!(collect(dictencodings); by=x->x.first, rev=true)
-                for (id, delock) in des
-                    # assign dict encoding ids
-                    de = delock.x
-                    dictsch = Tables.Schema((:col,), (eltype(de.data),))
-                    put!(msgs, makedictionarybatchmsg(dictsch, (col=de.data,), id, false, alignment), i)
-                end
-            end
-            put!(msgs, makerecordbatchmsg(sch[], cols, alignment), i, true)
-        else
-            Threads.@spawn begin
-                cols = toarrowtable(tbl, dictencodings, largelists, compress, denseunions, dictencode, dictencodenested)
-                if !isempty(cols.dictencodingdeltas)
-                    for de in cols.dictencodingdeltas
-                        dictsch = Tables.Schema((:col,), (eltype(de.data),))
-                        put!(msgs, makedictionarybatchmsg(dictsch, (col=de.data,), de.id, true, alignment), i)
-                    end
-                end
-                put!(msgs, makerecordbatchmsg(sch[], cols, alignment), i, true)
-            end
-        end
-    end
-    # close our message-writing channel, no further put!-ing is allowed
-    close(msgs)
-    # now wait for our message-writing task to finish writing
-    wait(tsk)
-    # write empty message
-    if !writetofile
-        Base.write(io, Message(UInt8[], nothing, 0, true, false), blocks, sch, alignment)
-    end
-    if writetofile
-        b = FlatBuffers.Builder(1024)
-        schfoot = makeschema(b, sch[], firstcols[])
-        if !isempty(blocks[1])
-            N = length(blocks[1])
-            Meta.footerStartRecordBatchesVector(b, N)
-            for blk in Iterators.reverse(blocks[1])
-                Meta.createBlock(b, blk.offset, blk.metaDataLength, blk.bodyLength)
-            end
-            recordbatches = FlatBuffers.endvector!(b, N)
-        else
-            recordbatches = FlatBuffers.UOffsetT(0)
-        end
-        if !isempty(blocks[2])
-            N = length(blocks[2])
-            Meta.footerStartDictionariesVector(b, N)
-            for blk in Iterators.reverse(blocks[2])
-                Meta.createBlock(b, blk.offset, blk.metaDataLength, blk.bodyLength)
-            end
-            dicts = FlatBuffers.endvector!(b, N)
-        else
-            dicts = FlatBuffers.UOffsetT(0)
-        end
-        Meta.footerStart(b)
-        Meta.footerAddVersion(b, Meta.MetadataVersion.V4)
-        Meta.footerAddSchema(b, schfoot)
-        Meta.footerAddDictionaries(b, dicts)
-        Meta.footerAddRecordBatches(b, recordbatches)
-        foot = Meta.footerEnd(b)
-        FlatBuffers.finish!(b, foot)
-        footer = FlatBuffers.finishedbytes(b)
-        Base.write(io, footer)
-        Base.write(io, Int32(length(footer)))
-        Base.write(io, "ARROW1")
-    end
-    return io
-end
-
-struct ToArrowTable
-    sch::Tables.Schema
-    cols::Vector{Any}
-    metadata::Union{Nothing, Dict{String, String}}
-    dictencodingdeltas::Vector{DictEncoding}
-end
-
-function toarrowtable(x, dictencodings, largelists, compress, denseunions, dictencode, dictencodenested)
-    @debug 1 "converting input table to arrow formatted columns"
-    cols = Tables.columns(x)
-    meta = getmetadata(cols)
-    sch = Tables.schema(cols)
-    types = collect(sch.types)
-    N = length(types)
-    newcols = Vector{Any}(undef, N)
-    newtypes = Vector{Type}(undef, N)
-    dictencodingdeltas = DictEncoding[]
-    Tables.eachcolumn(sch, cols) do col, i, nm
-        newcol = toarrowvector(col, i, dictencodings, dictencodingdeltas; compression=compress, largelists=largelists, denseunions=denseunions, dictencode=dictencode, dictencodenested=dictencodenested)
-        newtypes[i] = eltype(newcol)
-        newcols[i] = newcol
-    end
-    minlen, maxlen = extrema(length, newcols)
-    minlen == maxlen || throw(ArgumentError("columns with unequal lengths detected: $minlen < $maxlen"))
-    return ToArrowTable(Tables.Schema(sch.names, newtypes), newcols, meta, dictencodingdeltas)
-end
-
-Tables.columns(x::ToArrowTable) = x
-Tables.rowcount(x::ToArrowTable) = length(x.cols) == 0 ? 0 : length(x.cols[1])
-Tables.schema(x::ToArrowTable) = x.sch
-Tables.columnnames(x::ToArrowTable) = x.sch.names
-Tables.getcolumn(x::ToArrowTable, i::Int) = x.cols[i]
-
-struct Message
-    msgflatbuf
-    columns
-    bodylen
-    isrecordbatch::Bool
-    blockmsg::Bool
-end
-
-struct Block
-    offset::Int64
-    metaDataLength::Int32
-    bodyLength::Int64
-end
-
-function Base.write(io::IO, msg::Message, blocks, sch, alignment)
-    metalen = padding(length(msg.msgflatbuf), alignment)
-    @debug 1 "writing message: metalen = $metalen, bodylen = $(msg.bodylen), isrecordbatch = $(msg.isrecordbatch)"
-    if msg.blockmsg
-        push!(blocks[msg.isrecordbatch ? 1 : 2], Block(position(io), metalen + 8, msg.bodylen))
-    end
-    # now write the final message spec out
-    # continuation byte
-    n = Base.write(io, 0xFFFFFFFF)
-    # metadata length
-    n += Base.write(io, Int32(metalen))
-    # message flatbuffer
-    n += Base.write(io, msg.msgflatbuf)
-    n += writezeros(io, paddinglength(length(msg.msgflatbuf), alignment))
-    # message body
-    if msg.columns !== nothing
-        # write out buffers
-        for col in Tables.Columns(msg.columns)
-            writebuffer(io, col, alignment)
-        end
-    end
-    return n
-end
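-
-# On-disk layout of each message written above, per the arrow IPC spec:
-#   0xFFFFFFFF            continuation marker (4 bytes)
-#   Int32 metalen         padded flatbuffer length
-#   message flatbuffer    zero-padded to `alignment`
-#   message body          each buffer padded to `alignment`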
-
-function makemessage(b, headerType, header, columns=nothing, bodylen=0)
-    # write the message flatbuffer object
-    Meta.messageStart(b)
-    Meta.messageAddVersion(b, Meta.MetadataVersion.V5)
-    Meta.messageAddHeaderType(b, headerType)
-    Meta.messageAddHeader(b, header)
-    Meta.messageAddBodyLength(b, Int64(bodylen))
-    # Meta.messageAddCustomMetadata(b, meta)
-    # Meta.messageStartCustomMetadataVector(b, num_meta_elems)
-    msg = Meta.messageEnd(b)
-    FlatBuffers.finish!(b, msg)
-    return Message(FlatBuffers.finishedbytes(b), columns, bodylen, headerType == Meta.RecordBatch, headerType == Meta.RecordBatch || headerType == Meta.DictionaryBatch)
-end
-
-function makeschema(b, sch::Tables.Schema{names}, columns) where {names}
-    # build Field objects
-    N = length(names)
-    fieldoffsets = [fieldoffset(b, names[i], columns.cols[i]) for i = 1:N]
-    Meta.schemaStartFieldsVector(b, N)
-    for off in Iterators.reverse(fieldoffsets)
-        FlatBuffers.prependoffset!(b, off)
-    end
-    fields = FlatBuffers.endvector!(b, N)
-    if columns.metadata !== nothing
-        kvs = columns.metadata
-        kvoffs = Vector{FlatBuffers.UOffsetT}(undef, length(kvs))
-        for (i, (k, v)) in enumerate(kvs)
-            koff = FlatBuffers.createstring!(b, String(k))
-            voff = FlatBuffers.createstring!(b, String(v))
-            Meta.keyValueStart(b)
-            Meta.keyValueAddKey(b, koff)
-            Meta.keyValueAddValue(b, voff)
-            kvoffs[i] = Meta.keyValueEnd(b)
-        end
-        Meta.schemaStartCustomMetadataVector(b, length(kvs))
-        for off in Iterators.reverse(kvoffs)
-            FlatBuffers.prependoffset!(b, off)
-        end
-        meta = FlatBuffers.endvector!(b, length(kvs))
-    else
-        meta = FlatBuffers.UOffsetT(0)
-    end
-    # write schema object
-    Meta.schemaStart(b)
-    Meta.schemaAddEndianness(b, Meta.Endianness.Little)
-    Meta.schemaAddFields(b, fields)
-    Meta.schemaAddCustomMetadata(b, meta)
-    return Meta.schemaEnd(b)
-end
-
-function makeschemamsg(sch::Tables.Schema, columns)
-    @debug 1 "building schema message: sch = $sch"
-    b = FlatBuffers.Builder(1024)
-    schema = makeschema(b, sch, columns)
-    return makemessage(b, Meta.Schema, schema)
-end
-
-function fieldoffset(b, name, col)
-    nameoff = FlatBuffers.createstring!(b, String(name))
-    T = eltype(col)
-    nullable = T >: Missing
-    # check for custom metadata
-    if getmetadata(col) !== nothing
-        kvs = getmetadata(col)
-        kvoffs = Vector{FlatBuffers.UOffsetT}(undef, length(kvs))
-        for (i, (k, v)) in enumerate(kvs)
-            koff = FlatBuffers.createstring!(b, String(k))
-            voff = FlatBuffers.createstring!(b, String(v))
-            Meta.keyValueStart(b)
-            Meta.keyValueAddKey(b, koff)
-            Meta.keyValueAddValue(b, voff)
-            kvoffs[i] = Meta.keyValueEnd(b)
-        end
-        Meta.fieldStartCustomMetadataVector(b, length(kvs))
-        for off in Iterators.reverse(kvoffs)
-            FlatBuffers.prependoffset!(b, off)
-        end
-        meta = FlatBuffers.endvector!(b, length(kvs))
-    else
-        meta = FlatBuffers.UOffsetT(0)
-    end
-    # build dictionary
-    if isdictencoded(col)
-        encodingtype = indtype(col)
-        IT, inttype, _ = arrowtype(b, encodingtype)
-        Meta.dictionaryEncodingStart(b)
-        Meta.dictionaryEncodingAddId(b, Int64(getid(col)))
-        Meta.dictionaryEncodingAddIndexType(b, inttype)
-        # TODO: support isOrdered?
-        Meta.dictionaryEncodingAddIsOrdered(b, false)
-        dict = Meta.dictionaryEncodingEnd(b)
-    else
-        dict = FlatBuffers.UOffsetT(0)
-    end
-    type, typeoff, children = arrowtype(b, col)
-    if children !== nothing
-        Meta.fieldStartChildrenVector(b, length(children))
-        for off in Iterators.reverse(children)
-            FlatBuffers.prependoffset!(b, off)
-        end
-        children = FlatBuffers.endvector!(b, length(children))
-    else
-        Meta.fieldStartChildrenVector(b, 0)
-        children = FlatBuffers.endvector!(b, 0)
-    end
-    # build field object
-    if isdictencoded(col)
-        @debug 1 "building field: name = $name, nullable = $nullable, T = $T, type = $type, inttype = $IT, dictionary id = $(getid(col))"
-    else
-        @debug 1 "building field: name = $name, nullable = $nullable, T = $T, type = $type"
-    end
-    Meta.fieldStart(b)
-    Meta.fieldAddName(b, nameoff)
-    Meta.fieldAddNullable(b, nullable)
-    Meta.fieldAddTypeType(b, type)
-    Meta.fieldAddType(b, typeoff)
-    Meta.fieldAddDictionary(b, dict)
-    Meta.fieldAddChildren(b, children)
-    Meta.fieldAddCustomMetadata(b, meta)
-    return Meta.fieldEnd(b)
-end
-
-struct FieldNode
-    length::Int64
-    null_count::Int64
-end
-
-struct Buffer
-    offset::Int64
-    length::Int64
-end
-
-function makerecordbatchmsg(sch::Tables.Schema{names, types}, columns, alignment) where {names, types}
-    b = FlatBuffers.Builder(1024)
-    recordbatch, bodylen = makerecordbatch(b, sch, columns, alignment)
-    return makemessage(b, Meta.RecordBatch, recordbatch, columns, bodylen)
-end
-
-function makerecordbatch(b, sch::Tables.Schema{names, types}, columns, alignment) where {names, types}
-    nrows = Tables.rowcount(columns)
-    
-    compress = nothing
-    fieldnodes = FieldNode[]
-    fieldbuffers = Buffer[]
-    bufferoffset = 0
-    for col in Tables.Columns(columns)
-        if col isa Compressed
-            compress = compressiontype(col)
-        end
-        bufferoffset = makenodesbuffers!(col, fieldnodes, fieldbuffers, bufferoffset, alignment)
-    end
-    @debug 1 "building record batch message: nrows = $nrows, sch = $sch, compress = $compress"
-
-    # write field nodes objects
-    FN = length(fieldnodes)
-    Meta.recordBatchStartNodesVector(b, FN)
-    for fn in Iterators.reverse(fieldnodes)
-        Meta.createFieldNode(b, fn.length, fn.null_count)
-    end
-    nodes = FlatBuffers.endvector!(b, FN)
-
-    # write buffer objects
-    bodylen = 0
-    BN = length(fieldbuffers)
-    Meta.recordBatchStartBuffersVector(b, BN)
-    for buf in Iterators.reverse(fieldbuffers)
-        Meta.createBuffer(b, buf.offset, buf.length)
-        bodylen += padding(buf.length, alignment)
-    end
-    buffers = FlatBuffers.endvector!(b, BN)
-
-    # compression
-    if compress !== nothing
-        Meta.bodyCompressionStart(b)
-        Meta.bodyCompressionAddCodec(b, compress)
-        Meta.bodyCompressionAddMethod(b, Meta.BodyCompressionMethod.BUFFER)
-        compression = Meta.bodyCompressionEnd(b)
-    else
-        compression = FlatBuffers.UOffsetT(0)
-    end
-
-    # write record batch object
-    @debug 1 "built record batch message: nrows = $nrows, nodes = $fieldnodes, buffers = $fieldbuffers, compress = $compress, bodylen = $bodylen"
-    Meta.recordBatchStart(b)
-    Meta.recordBatchAddLength(b, Int64(nrows))
-    Meta.recordBatchAddNodes(b, nodes)
-    Meta.recordBatchAddBuffers(b, buffers)
-    Meta.recordBatchAddCompression(b, compression)
-    return Meta.recordBatchEnd(b), bodylen
-end
-
-function makedictionarybatchmsg(sch, columns, id, isdelta, alignment)
-    @debug 1 "building dictionary message: id = $id, sch = $sch, isdelta = $isdelta"
-    b = FlatBuffers.Builder(1024)
-    recordbatch, bodylen = makerecordbatch(b, sch, columns, alignment)
-    Meta.dictionaryBatchStart(b)
-    Meta.dictionaryBatchAddId(b, Int64(id))
-    Meta.dictionaryBatchAddData(b, recordbatch)
-    Meta.dictionaryBatchAddIsDelta(b, isdelta)
-    dictionarybatch = Meta.dictionaryBatchEnd(b)
-    return makemessage(b, Meta.DictionaryBatch, dictionarybatch, columns, bodylen)
-end
diff --git a/julia/Arrow/test/arrowjson.jl b/julia/Arrow/test/arrowjson.jl
deleted file mode 100644
index 6e9bccf..0000000
--- a/julia/Arrow/test/arrowjson.jl
+++ /dev/null
@@ -1,611 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-module ArrowJSON
-
-using Mmap
-using StructTypes, JSON3, Tables, SentinelArrays, DataAPI, Arrow
-
-# read json files as "table"
-# write to arrow stream/file
-# read arrow stream/file back
-
-abstract type Type end
-Type() = Null("null")
-StructTypes.StructType(::Base.Type{Type}) = StructTypes.AbstractType()
-
-children(::Base.Type{T}) where {T} = Field[]
-
-mutable struct Int <: Type
-    name::String
-    bitWidth::Int64
-    isSigned::Base.Bool
-end
-
-Int() = Int("", 0, true)
-Type(::Base.Type{T}) where {T <: Integer} = Int("int", 8 * sizeof(T), T <: Signed)
-StructTypes.StructType(::Base.Type{Int}) = StructTypes.Mutable()
-function juliatype(f, x::Int)
-    T = x.bitWidth == 8 ? Int8 : x.bitWidth == 16 ? Int16 :
-        x.bitWidth == 32 ? Int32 : x.bitWidth == 64 ? Int64 : Int128
-    return x.isSigned ? T : unsigned(T)
-end
-
-struct FloatingPoint <: Type
-    name::String
-    precision::String
-end
-
-Type(::Base.Type{T}) where {T <: AbstractFloat} = FloatingPoint("floatingpoint", T == Float16 ? "HALF" : T == Float32 ? "SINGLE" : "DOUBLE")
-StructTypes.StructType(::Base.Type{FloatingPoint}) = StructTypes.Struct()
-juliatype(f, x::FloatingPoint) = x.precision == "HALF" ? Float16 : x.precision == "SINGLE" ? Float32 : Float64
-
-struct FixedSizeBinary <: Type
-    name::String
-    byteWidth::Int64
-end
-
-Type(::Base.Type{NTuple{N, UInt8}}) where {N} = FixedSizeBinary("fixedsizebinary", N)
-children(::Base.Type{NTuple{N, UInt8}}) where {N} = Field[]
-StructTypes.StructType(::Base.Type{FixedSizeBinary}) = StructTypes.Struct()
-juliatype(f, x::FixedSizeBinary) = NTuple{x.byteWidth, UInt8}
-
-struct Decimal <: Type
-    name::String
-    precision::Int32
-    scale::Int32
-end
-
-Type(::Base.Type{Arrow.Decimal{P, S, T}}) where {P, S, T} = Decimal("decimal", P, S)
-StructTypes.StructType(::Base.Type{Decimal}) = StructTypes.Struct()
-juliatype(f, x::Decimal) = Arrow.Decimal{x.precision, x.scale, Int128}
-
-mutable struct Timestamp <: Type
-    name::String
-    unit::String
-    timezone::Union{Nothing, String}
-end
-
-Timestamp() = Timestamp("", "", nothing)
-unit(U) = U == Arrow.Meta.TimeUnit.SECOND ? "SECOND" :
-          U == Arrow.Meta.TimeUnit.MILLISECOND ? "MILLISECOND" :
-          U == Arrow.Meta.TimeUnit.MICROSECOND ? "MICROSECOND" : "NANOSECOND"
-Type(::Base.Type{Arrow.Timestamp{U, TZ}}) where {U, TZ} = Timestamp("timestamp", unit(U), TZ === nothing ? nothing : String(TZ))
-StructTypes.StructType(::Base.Type{Timestamp}) = StructTypes.Mutable()
-unitT(u) = u == "SECOND" ? Arrow.Meta.TimeUnit.SECOND :
-           u == "MILLISECOND" ? Arrow.Meta.TimeUnit.MILLISECOND :
-           u == "MICROSECOND" ? Arrow.Meta.TimeUnit.MICROSECOND : Arrow.Meta.TimeUnit.NANOSECOND
-juliatype(f, x::Timestamp) = Arrow.Timestamp{unitT(x.unit), x.timezone === nothing ? nothing : Symbol(x.timezone)}
-
-struct Duration <: Type
-    name::String
-    unit::String
-end
-
-Type(::Base.Type{Arrow.Duration{U}}) where {U} = Duration("duration", unit(U))
-StructTypes.StructType(::Base.Type{Duration}) = StructTypes.Struct()
-juliatype(f, x::Duration) = Arrow.Duration{unitT(x.unit)}
-
-struct Date <: Type
-    name::String
-    unit::String
-end
-
-Type(::Base.Type{Arrow.Date{U, T}}) where {U, T} = Date("date", U == Arrow.Meta.DateUnit.DAY ? "DAY" : "MILLISECOND")
-StructTypes.StructType(::Base.Type{Date}) = StructTypes.Struct()
-juliatype(f, x::Date) = Arrow.Date{x.unit == "DAY" ? Arrow.Meta.DateUnit.DAY : Arrow.Meta.DateUnit.MILLISECOND, x.unit == "DAY" ? Int32 : Int64}
-
-struct Time <: Type
-    name::String
-    unit::String
-    bitWidth::Int64
-end
-
-Type(::Base.Type{Arrow.Time{U, T}}) where {U, T} = Time("time", unit(U), 8 * sizeof(T))
-StructTypes.StructType(::Base.Type{Time}) = StructTypes.Struct()
-juliatype(f, x::Time) = Arrow.Time{unitT(x.unit), x.unit == "SECOND" || x.unit == "MILLISECOND" ? Int32 : Int64}
-
-struct Interval <: Type
-    name::String
-    unit::String
-end
-
-Type(::Base.Type{Arrow.Interval{U, T}}) where {U, T} = Interval("interval", U == Arrow.Meta.IntervalUnit.YEAR_MONTH ? "YEAR_MONTH" : "DAY_TIME")
-StructTypes.StructType(::Base.Type{Interval}) = StructTypes.Struct()
-juliatype(f, x::Interval) = Arrow.Interval{x.unit == "YEAR_MONTH" ? Arrow.Meta.IntervalUnit.YEAR_MONTH : Arrow.Meta.IntervalUnit.DAY_TIME, x.unit == "YEAR_MONTH" ? Int32 : Int64}
-
-struct UnionT <: Type
-    name::String
-    mode::String
-    typeIds::Vector{Int64}
-end
-
-Type(::Base.Type{Arrow.UnionT{T, typeIds, U}}) where {T, typeIds, U} = UnionT("union", T == Arrow.Meta.UnionMode.Dense ? "DENSE" : "SPARSE", collect(typeIds))
-children(::Base.Type{Arrow.UnionT{T, typeIds, U}}) where {T, typeIds, U} = Field[Field("", fieldtype(U, i), nothing) for i = 1:fieldcount(U)]
-StructTypes.StructType(::Base.Type{UnionT}) = StructTypes.Struct()
-juliatype(f, x::UnionT) = Arrow.UnionT{x.mode == "DENSE" ? Arrow.Meta.UnionMode.Dense : Arrow.Meta.UnionMode.Sparse, Tuple(x.typeIds), Tuple{(juliatype(y) for y in f.children)...}}
-
-struct List <: Type
-    name::String
-end
-
-Type(::Base.Type{Vector{T}}) where {T} = List("list")
-children(::Base.Type{Vector{T}}) where {T} = [Field("item", T, nothing)]
-StructTypes.StructType(::Base.Type{List}) = StructTypes.Struct()
-juliatype(f, x::List) = Vector{juliatype(f.children[1])}
-
-struct LargeList <: Type
-    name::String
-end
-
-StructTypes.StructType(::Base.Type{LargeList}) = StructTypes.Struct()
-juliatype(f, x::LargeList) = Vector{juliatype(f.children[1])}
-
-struct FixedSizeList <: Type
-    name::String
-    listSize::Int64
-end
-
-Type(::Base.Type{NTuple{N, T}}) where {N, T} = FixedSizeList("fixedsizelist", N)
-children(::Base.Type{NTuple{N, T}}) where {N, T} = [Field("item", T, nothing)]
-StructTypes.StructType(::Base.Type{FixedSizeList}) = StructTypes.Struct()
-juliatype(f, x::FixedSizeList) = NTuple{x.listSize, juliatype(f.children[1])}
-
-struct Struct <: Type
-    name::String
-end
-
-Type(::Base.Type{NamedTuple{names, types}}) where {names, types} = Struct("struct")
-children(::Base.Type{NamedTuple{names, types}}) where {names, types} = [Field(names[i], fieldtype(types, i), nothing) for i = 1:length(names)]
-StructTypes.StructType(::Base.Type{Struct}) = StructTypes.Struct()
-juliatype(f, x::Struct) = NamedTuple{Tuple(Symbol(x.name) for x in f.children), Tuple{(juliatype(y) for y in f.children)...}}
-
-struct Map <: Type
-    name::String
-    keysSorted::Base.Bool
-end
-
-Type(::Base.Type{Dict{K, V}}) where {K, V} = Map("map", false)
-children(::Base.Type{Dict{K, V}}) where {K, V} = [Field("entries", Arrow.KeyValue{K, V}, nothing)]
-StructTypes.StructType(::Base.Type{Map}) = StructTypes.Struct()
-juliatype(f, x::Map) = Dict{juliatype(f.children[1].children[1]), juliatype(f.children[1].children[2])}
-
-Type(::Base.Type{Arrow.KeyValue{K, V}}) where {K, V} = Struct("struct")
-children(::Base.Type{Arrow.KeyValue{K, V}}) where {K, V} = [Field("key", K, nothing), Field("value", V, nothing)]
-
-struct Null <: Type
-    name::String
-end
-
-Type(::Base.Type{Missing}) = Null("null")
-StructTypes.StructType(::Base.Type{Null}) = StructTypes.Struct()
-juliatype(f, x::Null) = Missing
-
-struct Utf8 <: Type
-    name::String
-end
-
-Type(::Base.Type{<:String}) = Utf8("utf8")
-StructTypes.StructType(::Base.Type{Utf8}) = StructTypes.Struct()
-juliatype(f, x::Utf8) = String
-
-struct LargeUtf8 <: Type
-    name::String
-end
-
-StructTypes.StructType(::Base.Type{LargeUtf8}) = StructTypes.Struct()
-juliatype(f, x::LargeUtf8) = String
-
-struct Binary <: Type
-    name::String
-end
-
-Type(::Base.Type{Vector{UInt8}}) = Binary("binary")
-children(::Base.Type{Vector{UInt8}}) = Field[]
-StructTypes.StructType(::Base.Type{Binary}) = StructTypes.Struct()
-juliatype(f, x::Binary) = Vector{UInt8}
-
-struct LargeBinary <: Type
-    name::String
-end
-
-StructTypes.StructType(::Base.Type{LargeBinary}) = StructTypes.Struct()
-juliatype(f, x::LargeBinary) = Vector{UInt8}
-
-struct Bool <: Type
-    name::String
-end
-
-Type(::Base.Type{Base.Bool}) = Bool("bool")
-StructTypes.StructType(::Base.Type{Bool}) = StructTypes.Struct()
-juliatype(f, x::Bool) = Base.Bool
-
-StructTypes.subtypekey(::Base.Type{Type}) = :name
-
-const SUBTYPES = @eval (
-    int=Int,
-    floatingpoint=FloatingPoint,
-    fixedsizebinary=FixedSizeBinary,
-    decimal=Decimal,
-    timestamp=Timestamp,
-    duration=Duration,
-    date=Date,
-    time=Time,
-    interval=Interval,
-    union=UnionT,
-    list=List,
-    largelist=LargeList,
-    fixedsizelist=FixedSizeList,
-    $(Symbol("struct"))=Struct,
-    map=Map,
-    null=Null,
-    utf8=Utf8,
-    largeutf8=LargeUtf8,
-    binary=Binary,
-    largebinary=LargeBinary,
-    bool=Bool
-)
-
-StructTypes.subtypes(::Base.Type{Type}) = SUBTYPES
-
-const Metadata = Union{Nothing, Vector{NamedTuple{(:key, :value), Tuple{String, String}}}}
-Metadata() = nothing
-
-mutable struct DictEncoding
-    id::Int64
-    indexType::Type
-    isOrdered::Base.Bool
-end
-
-DictEncoding() = DictEncoding(0, Type(), false)
-StructTypes.StructType(::Base.Type{DictEncoding}) = StructTypes.Mutable()
-
-mutable struct Field
-    name::String
-    nullable::Base.Bool
-    type::Type
-    children::Vector{Field}
-    dictionary::Union{DictEncoding, Nothing}
-    metadata::Metadata
-end
-
-Field() = Field("", true, Type(), Field[], nothing, Metadata())
-StructTypes.StructType(::Base.Type{Field}) = StructTypes.Mutable()
-Base.copy(f::Field) = Field(f.name, f.nullable, f.type, f.children, f.dictionary, f.metadata)
-
-function juliatype(f::Field)
-    T = juliatype(f, f.type)
-    return f.nullable ? Union{T, Missing} : T
-end
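-
-# For example, a nullable 32-bit signed integer field maps to a Union:
-#   juliatype(Field("x", true, Int("int", 32, true), Field[], nothing, nothing))
-#   == Union{Int32, Missing}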
-
-function Field(nm, ::Base.Type{T}, dictencodings) where {T}
-    S = Arrow.maybemissing(T)
-    type = Type(S)
-    ch = children(S)
-    if dictencodings !== nothing && haskey(dictencodings, nm)
-        dict = dictencodings[nm][2]
-    else
-        dict = nothing
-    end
-    return Field(nm, T !== S, type, ch, dict, nothing)
-end
-
-mutable struct Schema
-    fields::Vector{Field}
-    metadata::Metadata
-end
-
-Schema() = Schema(Field[], Metadata())
-StructTypes.StructType(::Base.Type{Schema}) = StructTypes.Mutable()
-
-struct Offsets{T} <: AbstractVector{T}
-    data::Vector{T}
-end
-
-Base.size(x::Offsets) = size(x.data)
-Base.getindex(x::Offsets, i::Base.Int) = getindex(x.data, i)
-
-mutable struct FieldData
-    name::String
-    count::Int64
-    VALIDITY::Union{Nothing, Vector{Int8}}
-    OFFSET::Union{Nothing, Offsets}
-    TYPE_ID::Union{Nothing, Vector{Int8}}
-    DATA::Union{Nothing, Vector{Any}}
-    children::Vector{FieldData}
-end
-
-FieldData() = FieldData("", 0, nothing, nothing, nothing, nothing, FieldData[])
-StructTypes.StructType(::Base.Type{FieldData}) = StructTypes.Mutable()
-
-function FieldData(nm, ::Base.Type{T}, col, dictencodings) where {T}
-    if dictencodings !== nothing && haskey(dictencodings, nm)
-        refvals = DataAPI.refarray(col.data)
-        if refvals !== col.data
-            IT = eltype(refvals)
-            col = (x - one(IT) for x in refvals)
-        else
-            _, de = dictencodings[nm]
-            IT = de.indexType
-            vals = unique(col)
-            col = Arrow.DictEncoder(col, vals, Arrow.encodingtype(length(vals)))
-        end
-        return FieldData(nm, IT, col, nothing)
-    end
-    S = Arrow.maybemissing(T)
-    len = Arrow._length(col)
-    VALIDITY = OFFSET = TYPE_ID = DATA = nothing
-    children = FieldData[]
-    if S <: Pair
-        return FieldData(nm, Vector{Arrow.KeyValue{Arrow._keytype(S), Arrow._valtype(S)}}, (Arrow.KeyValue(k, v) for (k, v) in pairs(col)), dictencodings)
-    elseif S !== Missing
-        # VALIDITY
-        VALIDITY = Int8[!ismissing(x) for x in col]
-        # OFFSET
-        if S <: Vector || S == String
-            lenfun = S == String ? x->ismissing(x) ? 0 : sizeof(x) : x->ismissing(x) ? 0 : length(x)
-            tot = sum(lenfun, col)
-            if tot > 2147483647
-                OFFSET = String[string(ismissing(x) ? 0 : lenfun(x)) for x in col]
-                pushfirst!(OFFSET, "0")
-            else
-                OFFSET = Int32[ismissing(x) ? 0 : lenfun(x) for x in col]
-                pushfirst!(OFFSET, 0)
-            end
-            OFFSET = Offsets(OFFSET)
-            push!(children, FieldData("item", eltype(S), Arrow.flatten(skipmissing(col)), dictencodings))
-        elseif S <: NTuple
-            if Arrow.ArrowTypes.gettype(S) == UInt8
-                DATA = [ismissing(x) ? Arrow.ArrowTypes.default(S) : String(collect(x)) for x in col]
-            else
-                push!(children, FieldData("item", Arrow.ArrowTypes.gettype(S), Arrow.flatten(coalesce(x, Arrow.ArrowTypes.default(S)) for x in col), dictencodings))
-            end
-        elseif S <: NamedTuple
-            for (nm, typ) in zip(fieldnames(S), fieldtypes(S))
-                push!(children, FieldData(String(nm), typ, (getfield(x, nm) for x in col), dictencodings))
-            end
-        elseif S <: Arrow.UnionT
-            U = eltype(S)
-            tids = Arrow.typeids(S) === nothing ? (0:fieldcount(U)) : Arrow.typeids(S)
-            TYPE_ID = [x === missing ? 0 : tids[Arrow.isatypeid(x, U)] for x in col]
-            if Arrow.unionmode(S) == Arrow.Meta.UnionMode.Dense
-                offs = zeros(Int32, fieldcount(U))
-                OFFSET = Int32[]
-                for x in col
-                    idx = x === missing ? 1 : Arrow.isatypeid(x, U)
-                    push!(OFFSET, offs[idx])
-                    offs[idx] += 1
-                end
-                for i = 1:fieldcount(U)
-                    SS = fieldtype(U, i)
-                    push!(children, FieldData("$i", SS, Arrow.filtered(i == 1 ? Union{SS, Missing} : Arrow.maybemissing(SS), col), dictencodings))
-                end
-            else
-                for i = 1:fieldcount(U)
-                    SS = fieldtype(U, i)
-                    push!(children, FieldData("$i", SS, Arrow.replaced(SS, col), dictencodings))
-                end
-            end
-        elseif S <: Arrow.KeyValue
-            push!(children, FieldData("key", Arrow.keyvalueK(S), (x.key for x in col), dictencodings))
-            push!(children, FieldData("value", Arrow.keyvalueV(S), (x.value for x in col), dictencodings))
-        end
-    end
-    return FieldData(nm, len, VALIDITY, OFFSET, TYPE_ID, DATA, children)
-end
-
-mutable struct RecordBatch
-    count::Int64
-    columns::Vector{FieldData}
-end
-
-RecordBatch() = RecordBatch(0, FieldData[])
-StructTypes.StructType(::Base.Type{RecordBatch}) = StructTypes.Mutable()
-
-mutable struct DictionaryBatch
-    id::Int64
-    data::RecordBatch
-end
-
-DictionaryBatch() = DictionaryBatch(0, RecordBatch())
-StructTypes.StructType(::Base.Type{DictionaryBatch}) = StructTypes.Mutable()
-
-mutable struct DataFile <: Tables.AbstractColumns
-    schema::Schema
-    batches::Vector{RecordBatch}
-    dictionaries::Vector{DictionaryBatch}
-end
-
-Base.propertynames(x::DataFile) = (:schema, :batches, :dictionaries)
-
-function Base.getproperty(df::DataFile, nm::Symbol)
-    if nm === :schema
-        return getfield(df, :schema)
-    elseif nm === :batches
-        return getfield(df, :batches)
-    elseif nm === :dictionaries
-        return getfield(df, :dictionaries)
-    end
-    return Tables.getcolumn(df, nm)
-end
-
-DataFile() = DataFile(Schema(), RecordBatch[], DictionaryBatch[])
-StructTypes.StructType(::Base.Type{DataFile}) = StructTypes.Mutable()
-
-parsefile(file) = JSON3.read(Mmap.mmap(file), DataFile)
-
-# make DataFile satisfy Tables.jl interface
-function Tables.partitions(x::DataFile)
-    if isempty(x.batches)
-        # special case empty batches by producing a single DataFile w/ schema
-        return (DataFile(x.schema, RecordBatch[], x.dictionaries),)
-    else
-        return (DataFile(x.schema, [x.batches[i]], x.dictionaries) for i = 1:length(x.batches))
-    end
-end
-
-Tables.columns(x::DataFile) = x
-
-function Tables.schema(x::DataFile)
-    names = map(x -> x.name, x.schema.fields)
-    types = map(x -> juliatype(x), x.schema.fields)
-    return Tables.Schema(names, types)
-end
-
-Tables.columnnames(x::DataFile) =  map(x -> Symbol(x.name), x.schema.fields)
-
-function Tables.getcolumn(x::DataFile, i::Base.Int)
-    field = x.schema.fields[i]
-    type = juliatype(field)
-    return ChainedVector(ArrowArray{type}[ArrowArray{type}(field, length(x.batches) > 0 ? x.batches[j].columns[i] : FieldData(), x.dictionaries) for j = 1:length(x.batches)])
-end
-
-function Tables.getcolumn(x::DataFile, nm::Symbol)
-    i = findfirst(x -> x.name == String(nm), x.schema.fields)
-    return Tables.getcolumn(x, i)
-end
-
-struct ArrowArray{T} <: AbstractVector{T}
-    field::Field
-    fielddata::FieldData
-    dictionaries::Vector{DictionaryBatch}
-end
-ArrowArray(f::Field, fd::FieldData, d) = ArrowArray{juliatype(f)}(f, fd, d)
-Base.size(x::ArrowArray) = (x.fielddata.count,)
-
-function Base.getindex(x::ArrowArray{T}, i::Base.Int) where {T}
-    @boundscheck checkbounds(x, i)
-    S = Base.nonmissingtype(T)
-    if x.field.dictionary !== nothing
-        fielddata = x.dictionaries[findfirst(y -> y.id == x.field.dictionary.id, x.dictionaries)].data.columns[1]
-        field = copy(x.field)
-        field.dictionary = nothing
-        idx = x.fielddata.DATA[i] + 1
-        return ArrowArray(field, fielddata, x.dictionaries)[idx]
-    end
-    if T === Missing
-        return missing
-    elseif S <: UnionT
-        U = eltype(S)
-        tids = Arrow.typeids(S) === nothing ? (0:fieldcount(U)) : Arrow.typeids(S)
-        typeid = tids[x.fielddata.TYPE_ID[i]]
-        if Arrow.unionmode(S) == Arrow.Meta.UnionMode.Dense
-            off = x.fielddata.OFFSET[i]
-            return ArrowArray(x.field.children[typeid+1], x.fielddata.children[typeid+1], x.dictionaries)[off]
-        else
-            return ArrowArray(x.field.children[typeid+1], x.fielddata.children[typeid+1], x.dictionaries)[i]
-        end
-    end
-    x.fielddata.VALIDITY[i] == 0 && return missing
-    if S <: Vector{UInt8}
-        return copy(codeunits(x.fielddata.DATA[i]))
-    elseif S <: String
-        return x.fielddata.DATA[i]
-    elseif S <: Vector
-        offs = x.fielddata.OFFSET
-        A = ArrowArray{eltype(S)}(x.field.children[1], x.fielddata.children[1], x.dictionaries)
-        return A[(offs[i] + 1):offs[i + 1]]
-    elseif S <: Dict
-        offs = x.fielddata.OFFSET
-        A = ArrowArray(x.field.children[1], x.fielddata.children[1], x.dictionaries)
-        return Dict(y.key => y.value for y in A[(offs[i] + 1):offs[i + 1]])
-    elseif S <: Tuple
-        if Arrow.ArrowTypes.gettype(S) == UInt8
-            A = x.fielddata.DATA
-            return Tuple(map(UInt8, collect(A[i][1:x.field.type.byteWidth])))
-        else
-            sz = x.field.type.listSize
-            A = ArrowArray{Arrow.ArrowTypes.gettype(S)}(x.field.children[1], x.fielddata.children[1], x.dictionaries)
-            off = (i - 1) * sz + 1
-            return Tuple(A[off:(off + sz - 1)])
-        end
-    elseif S <: NamedTuple
-        data = (ArrowArray(x.field.children[j], x.fielddata.children[j], x.dictionaries)[i] for j = 1:length(x.field.children))
-        return NamedTuple{fieldnames(S)}(Tuple(data))
-    elseif S == Int64 || S == UInt64
-        return parse(S, x.fielddata.DATA[i])
-    elseif S <: Arrow.Decimal
-        str = x.fielddata.DATA[i]
-        return S(parse(Int128, str))
-    elseif S <: Arrow.Date || S <: Arrow.Time
-        val = x.fielddata.DATA[i]
-        return Arrow.storagetype(S) == Int32 ? S(val) : S(parse(Int64, val))
-    elseif S <: Arrow.Timestamp
-        return S(parse(Int64, x.fielddata.DATA[i]))
-    else
-        return S(x.fielddata.DATA[i])
-    end
-end
-
-# take any Tables.jl source and write out arrow json datafile
-function DataFile(source)
-    fields = Field[]
-    metadata = nothing # TODO?
-    batches = RecordBatch[]
-    dictionaries = DictionaryBatch[]
-    dictencodings = Dict{String, Tuple{Base.Type, DictEncoding}}()
-    dictid = Ref(0)
-    for (i, tbl1) in enumerate(Tables.partitions(source))
-        tbl = Arrow.toarrowtable(tbl1)
-        if i == 1
-            sch = Tables.schema(tbl)
-            for (nm, T, col) in zip(sch.names, sch.types, Tables.Columns(tbl))
-                if col isa Arrow.DictEncode
-                    id = dictid[]
-                    dictid[] += 1
-                    codes = DataAPI.refarray(col.data)
-                    if codes !== col.data
-                        IT = Type(eltype(codes))
-                    else
-                        IT = Type(Arrow.encodingtype(length(unique(col))))
-                    end
-                    dictencodings[String(nm)] = (T, DictEncoding(id, IT, false))
-                end
-                push!(fields, Field(String(nm), T, dictencodings))
-            end
-        end
-        # build record batch
-        len = Tables.rowcount(tbl)
-        columns = FieldData[]
-        for (nm, T, col) in zip(sch.names, sch.types, Tables.Columns(tbl))
-            push!(columns, FieldData(String(nm), T, col, dictencodings))
-        end
-        push!(batches, RecordBatch(len, columns))
-        # build dictionaries
-        for (nm, (T, dictencoding)) in dictencodings
-            column = FieldData(nm, T, Tables.getcolumn(tbl, nm), nothing)
-            recordbatch = RecordBatch(len, [column])
-            push!(dictionaries, DictionaryBatch(dictencoding.id, recordbatch))
-        end
-    end
-    schema = Schema(fields, metadata)
-    return DataFile(schema, batches, dictionaries)
-end
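-
-# A minimal round-trip sketch (hypothetical file name): parse an integration
-# test JSON file, write it back out as arrow data, and compare:
-#
-#   df = parsefile("generated_primitive.json")
-#   io = IOBuffer()
-#   Arrow.write(io, df)
-#   isequal(df, Arrow.Table(seekstart(io))) # == true if the round trip is lossless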
-
-function Base.isequal(df::DataFile, tbl::Arrow.Table)
-    Tables.schema(df) == Tables.schema(tbl) || return false
-    i = 1
-    for (col1, col2) in zip(Tables.Columns(df), Tables.Columns(tbl))
-        if !isequal(col1, col2)
-            @show i
-            return false
-        end
-        i += 1
-    end
-    return true
-end
-
-end
diff --git a/julia/Arrow/test/arrowjson/datetime.json b/julia/Arrow/test/arrowjson/datetime.json
deleted file mode 100644
index 35eace4..0000000
--- a/julia/Arrow/test/arrowjson/datetime.json
+++ /dev/null
@@ -1,911 +0,0 @@
-{
-  "schema": {
-    "fields": [
-      {
-        "name": "f0",
-        "type": {
-          "name": "date",
-          "unit": "DAY"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f1",
-        "type": {
-          "name": "date",
-          "unit": "MILLISECOND"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f2",
-        "type": {
-          "name": "time",
-          "unit": "SECOND",
-          "bitWidth": 32
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f3",
-        "type": {
-          "name": "time",
-          "unit": "MILLISECOND",
-          "bitWidth": 32
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f4",
-        "type": {
-          "name": "time",
-          "unit": "MICROSECOND",
-          "bitWidth": 64
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f5",
-        "type": {
-          "name": "time",
-          "unit": "NANOSECOND",
-          "bitWidth": 64
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f6",
-        "type": {
-          "name": "timestamp",
-          "unit": "SECOND"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f7",
-        "type": {
-          "name": "timestamp",
-          "unit": "MILLISECOND"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f8",
-        "type": {
-          "name": "timestamp",
-          "unit": "MICROSECOND"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f9",
-        "type": {
-          "name": "timestamp",
-          "unit": "NANOSECOND"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f10",
-        "type": {
-          "name": "timestamp",
-          "unit": "MILLISECOND"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f11",
-        "type": {
-          "name": "timestamp",
-          "unit": "SECOND",
-          "timezone": "UTC"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f12",
-        "type": {
-          "name": "timestamp",
-          "unit": "MILLISECOND",
-          "timezone": "US/Eastern"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f13",
-        "type": {
-          "name": "timestamp",
-          "unit": "MICROSECOND",
-          "timezone": "Europe/Paris"
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f14",
-        "type": {
-          "name": "timestamp",
-          "unit": "NANOSECOND",
-          "timezone": "US/Pacific"
-        },
-        "nullable": true,
-        "children": []
-      }
-    ]
-  },
-  "batches": [
-    {
-      "count": 7,
-      "columns": [
-        {
-          "name": "f0",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            -719162,
-            2932896,
-            1251583,
-            -354578,
-            1947695,
-            -669151,
-            26653
-          ]
-        },
-        {
-          "name": "f1",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "-2820221740189",
-            "71549882314362",
-            "125067746235030",
-            "-27038791348254",
-            "42137473450326"
-          ]
-        },
-        {
-          "name": "f2",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            0,
-            86400,
-            76127,
-            6700,
-            27103,
-            56151,
-            56654
-          ]
-        },
-        {
-          "name": "f3",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            0,
-            86400000,
-            17984286,
-            76370591,
-            60937117,
-            2240575,
-            8788989
-          ]
-        },
-        {
-          "name": "f4",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "0",
-            "86400000000",
-            "74085029005",
-            "23058796418",
-            "5827157574",
-            "30753386088",
-            "41165364667"
-          ]
-        },
-        {
-          "name": "f5",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "0",
-            "86400000000000",
-            "23907445718784",
-            "74345421086418",
-            "75233481254444",
-            "82172159793710",
-            "58497242525071"
-          ]
-        },
-        {
-          "name": "f6",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-62135596800",
-            "253402214400",
-            "-61131551586",
-            "109841560561",
-            "-3916465142",
-            "146694684650",
-            "138850275868"
-          ]
-        },
-        {
-          "name": "f7",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "73881152631437",
-            "69908713976427",
-            "252339755754438",
-            "-24746530024729",
-            "169302540975380"
-          ]
-        },
-        {
-          "name": "f8",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "-62135596800000000",
-            "253402214400000000",
-            "201464679490582249",
-            "168126161155504013",
-            "-23403778250906066",
-            "205706554937392102",
-            "45776665091115087"
-          ]
-        },
-        {
-          "name": "f9",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-9223372036854775808",
-            "9223372036854775807",
-            "-6357255048670867880",
-            "-8661447973993819541",
-            "-8212788386909103318",
-            "-8530954041419345600",
-            "-4218486829304453721"
-          ]
-        },
-        {
-          "name": "f10",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "206983911256609",
-            "94973546379549",
-            "-18473207641060",
-            "36529119814530",
-            "143273969098011"
-          ]
-        },
-        {
-          "name": "f11",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-62135596800",
-            "253402214400",
-            "225567855249",
-            "18090198256",
-            "-18645631593",
-            "1683299996",
-            "240974238031"
-          ]
-        },
-        {
-          "name": "f12",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "-41888047432132",
-            "142167692985706",
-            "96450253340232",
-            "-28800292871111",
-            "31551906541089"
-          ]
-        },
-        {
-          "name": "f13",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-62135596800000000",
-            "253402214400000000",
-            "-55035741541368439",
-            "110555619232926841",
-            "13584197914180451",
-            "-40099398122556776",
-            "154575532939365500"
-          ]
-        },
-        {
-          "name": "f14",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-9223372036854775808",
-            "9223372036854775807",
-            "5175948389020765869",
-            "5557679156666679724",
-            "4250919303876106324",
-            "9160676477011889469",
-            "8585006913301874724"
-          ]
-        }
-      ]
-    },
-    {
-      "count": 10,
-      "columns": [
-        {
-          "name": "f0",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            -719162,
-            2932896,
-            1879965,
-            -566367,
-            37728,
-            1761040,
-            279144,
-            1056794,
-            756303,
-            525725
-          ]
-        },
-        {
-          "name": "f1",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            0,
-            0
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "-38092110755085",
-            "-28445105640862",
-            "82150583163219",
-            "54419670636015",
-            "157522888788052",
-            "-41135178703404",
-            "-55692081078291",
-            "23161948344048"
-          ]
-        },
-        {
-          "name": "f2",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            0,
-            86400,
-            68158,
-            43645,
-            82390,
-            63272,
-            8687,
-            73663,
-            41080,
-            16606
-          ]
-        },
-        {
-          "name": "f3",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            0,
-            0,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            0,
-            86400000,
-            4300160,
-            71414742,
-            77204018,
-            20632726,
-            31365614,
-            66601445,
-            59573489,
-            62138475
-          ]
-        },
-        {
-          "name": "f4",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "0",
-            "86400000000",
-            "28651881349",
-            "76962235570",
-            "72557141940",
-            "81551970477",
-            "20740172257",
-            "69927019990",
-            "76743031592",
-            "82821335874"
-          ]
-        },
-        {
-          "name": "f5",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            0
-          ],
-          "DATA": [
-            "0",
-            "86400000000000",
-            "69757112637445",
-            "57552711513942",
-            "29426946816946",
-            "34878855922929",
-            "33344575898987",
-            "80887006335433",
-            "34037765279999",
-            "51577535310194"
-          ]
-        },
-        {
-          "name": "f6",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-62135596800",
-            "253402214400",
-            "17362133914",
-            "-45891238920",
-            "184875963653",
-            "189805054827",
-            "-58368591641",
-            "11979945774",
-            "-42159999942",
-            "-40114167869"
-          ]
-        },
-        {
-          "name": "f7",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "228991365143433",
-            "169961535994833",
-            "200469360821110",
-            "69234108321383",
-            "198319780924526",
-            "191497586634193",
-            "175427870270356",
-            "57342673854963"
-          ]
-        },
-        {
-          "name": "f8",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-62135596800000000",
-            "253402214400000000",
-            "56980200145982394",
-            "181929648203745781",
-            "139102923751831867",
-            "208997257530770666",
-            "16136961511557279",
-            "41555612864958844",
-            "13419848118557598",
-            "87383692083185618"
-          ]
-        },
-        {
-          "name": "f9",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-9223372036854775808",
-            "9223372036854775807",
-            "-7031715621840828119",
-            "6059066797068981521",
-            "-1646261793033501417",
-            "-4220806875454311426",
-            "-5197114782094970725",
-            "-8786449967831538943",
-            "2974021310284646715",
-            "-8065049992539820014"
-          ]
-        },
-        {
-          "name": "f10",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            0,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "178310981489839",
-            "-57632494932898",
-            "-49036210908585",
-            "52895117552494",
-            "171282515196488",
-            "132014017559614",
-            "37589110284897",
-            "-58565057255450"
-          ]
-        },
-        {
-          "name": "f11",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-62135596800",
-            "253402214400",
-            "-25861647763",
-            "225566577159",
-            "239974631847",
-            "9594019763",
-            "195861877033",
-            "-19961060193",
-            "75621579368",
-            "-37516489502"
-          ]
-        },
-        {
-          "name": "f12",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-62135596800000",
-            "253402214400000",
-            "197770622812426",
-            "195993483135189",
-            "187243903796151",
-            "193213107363200",
-            "83570298570259",
-            "252571502045214",
-            "129428288356579",
-            "-6553516468568"
-          ]
-        },
-        {
-          "name": "f13",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-62135596800000000",
-            "253402214400000000",
-            "227405247332731417",
-            "-23876629555725661",
-            "172967389442803645",
-            "214366955371313241",
-            "86933375268516953",
-            "162567381239071692",
-            "40270626452354761",
-            "-9766478375147980"
-          ]
-        },
-        {
-          "name": "f14",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-9223372036854775808",
-            "9223372036854775807",
-            "-3367778002617009344",
-            "-921482854487607004",
-            "1280046021732622411",
-            "-2084512789553264359",
-            "6774927372092824293",
-            "2301253808511314195",
-            "-447823511949250637",
-            "-9137589927882857097"
-          ]
-        }
-      ]
-    }
-  ]
-}
\ No newline at end of file
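
Both deleted fixtures follow the Arrow JSON integration-test format: a "schema"
object listing fields, then "batches" whose columns pair a VALIDITY array
(1 = non-null, 0 = null) with the raw DATA values, where 64-bit and wider
values are written as JSON strings to avoid precision loss in JSON parsers.
The following is a minimal sketch, not code from this repository, of how a few
of these field definitions map onto the arrow crate's DataType enum, assuming
the signatures current at the time of this change (arrow 4.x, where Timestamp
takes an Option<String> timezone and Decimal takes usize precision/scale);
the variable names are illustrative only.

use arrow::datatypes::{DataType, Field, Schema, TimeUnit};

fn main() {
    // datetime.json: dates, times, and timestamps with and without timezones.
    let datetime_schema = Schema::new(vec![
        // "date" / "MILLISECOND" -> 64-bit milliseconds since the UNIX epoch
        Field::new("f1", DataType::Date64, true),
        // "time" / "SECOND" / bitWidth 32
        Field::new("f2", DataType::Time32(TimeUnit::Second), true),
        // "time" / "MICROSECOND" / bitWidth 64
        Field::new("f4", DataType::Time64(TimeUnit::Microsecond), true),
        // "timestamp" / "SECOND" / timezone "UTC"
        Field::new(
            "f11",
            DataType::Timestamp(TimeUnit::Second, Some("UTC".to_string())),
            true,
        ),
    ]);

    // decimal.json: 128-bit decimals covering precision 3..=38, all scale 2.
    let decimal_field = Field::new("f0", DataType::Decimal(3, 2), true);

    println!("{:?}", datetime_schema);
    println!("{:?}", decimal_field);
}

Note that Time32 admits only SECOND/MILLISECOND units and Time64 only
MICROSECOND/NANOSECOND, which is exactly the unit/bitWidth pairing the
datetime.json schema above uses for f2 through f5.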
diff --git a/julia/Arrow/test/arrowjson/decimal.json b/julia/Arrow/test/arrowjson/decimal.json
deleted file mode 100644
index 6c27800..0000000
--- a/julia/Arrow/test/arrowjson/decimal.json
+++ /dev/null
@@ -1,32948 +0,0 @@
-{
-  "schema": {
-    "fields": [
-      {
-        "name": "f0",
-        "type": {
-          "name": "decimal",
-          "precision": 3,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f1",
-        "type": {
-          "name": "decimal",
-          "precision": 4,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f2",
-        "type": {
-          "name": "decimal",
-          "precision": 5,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f3",
-        "type": {
-          "name": "decimal",
-          "precision": 6,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f4",
-        "type": {
-          "name": "decimal",
-          "precision": 7,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f5",
-        "type": {
-          "name": "decimal",
-          "precision": 8,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f6",
-        "type": {
-          "name": "decimal",
-          "precision": 9,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f7",
-        "type": {
-          "name": "decimal",
-          "precision": 10,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f8",
-        "type": {
-          "name": "decimal",
-          "precision": 11,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f9",
-        "type": {
-          "name": "decimal",
-          "precision": 12,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f10",
-        "type": {
-          "name": "decimal",
-          "precision": 13,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f11",
-        "type": {
-          "name": "decimal",
-          "precision": 14,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f12",
-        "type": {
-          "name": "decimal",
-          "precision": 15,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f13",
-        "type": {
-          "name": "decimal",
-          "precision": 16,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f14",
-        "type": {
-          "name": "decimal",
-          "precision": 17,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f15",
-        "type": {
-          "name": "decimal",
-          "precision": 18,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f16",
-        "type": {
-          "name": "decimal",
-          "precision": 19,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f17",
-        "type": {
-          "name": "decimal",
-          "precision": 20,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f18",
-        "type": {
-          "name": "decimal",
-          "precision": 21,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f19",
-        "type": {
-          "name": "decimal",
-          "precision": 22,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f20",
-        "type": {
-          "name": "decimal",
-          "precision": 23,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f21",
-        "type": {
-          "name": "decimal",
-          "precision": 24,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f22",
-        "type": {
-          "name": "decimal",
-          "precision": 25,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f23",
-        "type": {
-          "name": "decimal",
-          "precision": 26,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f24",
-        "type": {
-          "name": "decimal",
-          "precision": 27,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f25",
-        "type": {
-          "name": "decimal",
-          "precision": 28,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f26",
-        "type": {
-          "name": "decimal",
-          "precision": 29,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f27",
-        "type": {
-          "name": "decimal",
-          "precision": 30,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f28",
-        "type": {
-          "name": "decimal",
-          "precision": 31,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f29",
-        "type": {
-          "name": "decimal",
-          "precision": 32,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f30",
-        "type": {
-          "name": "decimal",
-          "precision": 33,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f31",
-        "type": {
-          "name": "decimal",
-          "precision": 34,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f32",
-        "type": {
-          "name": "decimal",
-          "precision": 35,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f33",
-        "type": {
-          "name": "decimal",
-          "precision": 36,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f34",
-        "type": {
-          "name": "decimal",
-          "precision": 37,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      },
-      {
-        "name": "f35",
-        "type": {
-          "name": "decimal",
-          "precision": 38,
-          "scale": 2
-        },
-        "nullable": true,
-        "children": []
-      }
-    ]
-  },
-  "batches": [
-    {
-      "count": 7,
-      "columns": [
-        {
-          "name": "f0",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-14265",
-            "-3301",
-            "-20359",
-            "5259",
-            "-10833",
-            "-19794",
-            "22904"
-          ]
-        },
-        {
-          "name": "f1",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-7020",
-            "7396",
-            "9702",
-            "-29862",
-            "25915",
-            "17934",
-            "23441"
-          ]
-        },
-        {
-          "name": "f2",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            0,
-            0,
-            0
-          ],
-          "DATA": [
-            "-6799031",
-            "-6350361",
-            "-7919057",
-            "-2688856",
-            "-2460218",
-            "-5498780",
-            "5580928"
-          ]
-        },
-        {
-          "name": "f3",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-3533649",
-            "643928",
-            "2739361",
-            "-2006582",
-            "7501109",
-            "-2029241",
-            "-6554749"
-          ]
-        },
-        {
-          "name": "f4",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-1383097445",
-            "-317364446",
-            "39137303",
-            "-203304373",
-            "284864794",
-            "-1902924832",
-            "694509255"
-          ]
-        },
-        {
-          "name": "f5",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "1445584597",
-            "-1479256303",
-            "1986678586",
-            "653640925",
-            "-1377647126",
-            "163472005",
-            "-985666433"
-          ]
-        },
-        {
-          "name": "f6",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-1032313347",
-            "1968833128",
-            "2108909581",
-            "1727353475",
-            "912414766",
-            "-635823470",
-            "1712241290"
-          ]
-        },
-        {
-          "name": "f7",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            0,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "154945363887",
-            "416184610386",
-            "339640718067",
-            "517615365091",
-            "-225329280656",
-            "170183803130",
-            "-151819857736"
-          ]
-        },
-        {
-          "name": "f8",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "415862710871",
-            "-497367616337",
-            "-513589024815",
-            "365532163027",
-            "162771025458",
-            "-217545761908",
-            "-177764161272"
-          ]
-        },
-        {
-          "name": "f9",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "139749710369981",
-            "-83794245890178",
-            "-91856283653391",
-            "136480936919831",
-            "139751556918685",
-            "-128711073562986",
-            "57528145123438"
-          ]
-        },
-        {
-          "name": "f10",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "52917228652967",
-            "-94943960868145",
-            "-95787469176401",
-            "138513251818631",
-            "72391840904205",
-            "15456369093039",
-            "43627762817987"
-          ]
-        },
-        {
-          "name": "f11",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-55939733680307",
-            "109774177882810",
-            "-18751094013174",
-            "66217182114674",
-            "54552088012955",
-            "10184533351464",
-            "-42623419179005"
-          ]
-        },
-        {
-          "name": "f12",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "5323426016158598",
-            "-14889118959500841",
-            "17725275930019497",
-            "8976278451851381",
-            "-13047873474817232",
-            "-35698038632707430",
-            "35553131382011652"
-          ]
-        },
-        {
-          "name": "f13",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "22248347423524568",
-            "21382628128477388",
-            "10729166497278728",
-            "33552782977485637",
-            "-27049764851522362",
-            "-22662588671534830",
-            "34450594992450092"
-          ]
-        },
-        {
-          "name": "f14",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "1793092340943680288",
-            "6687492261570159758",
-            "-8215335212004105263",
-            "4478178378408252470",
-            "163214921176454093",
-            "8663994206540321487",
-            "-7336047061160684053"
-          ]
-        },
-        {
-          "name": "f15",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "-8553613303342920727",
-            "2303004565228604365",
-            "4387581718276767427",
-            "-9190240538897215152",
-            "-2359611618219705826",
-            "-4394705260138635628",
-            "-5098494694180487526"
-          ]
-        },
-        {
-          "name": "f16",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-2066857943452927830158",
-            "903014216615599800157",
-            "-550458216531247342288",
-            "2017634601926315282348",
-            "848752616371714143556",
-            "1142061465959813542452",
-            "1385502695475659971908"
-          ]
-        },
-        {
-          "name": "f17",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "-2114118262682166206082",
-            "1902359162690886678168",
-            "416686278682682920537",
-            "1670406372017974831657",
-            "1672178198289799650411",
-            "1290828393749566925129",
-            "477509832520793484657"
-          ]
-        },
-        {
-          "name": "f18",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-761286776745227142069",
-            "-519173571795485925518",
-            "-1152371818412313184820",
-            "-1693854308571834153795",
-            "-2338147549154180008842",
-            "-722980793690531284755",
-            "-308453791763555488903"
-          ]
-        },
-        {
-          "name": "f19",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-363139155978036972417607",
-            "-67592154020748584606597",
-            "229641197562527780640996",
-            "-346708211516644122649482",
-            "-1250859224618770910347",
-            "-324094365896255288686912",
-            "587754147918610672847722"
-          ]
-        },
-        {
-          "name": "f20",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-260044582391618089260319",
-            "-571174996847059618302630",
-            "-109712997473546598340520",
-            "-504887878361994310822232",
-            "-79933142040493800702044",
-            "391593215117382882517477",
-            "253411954510878438009626"
-          ]
-        },
-        {
-          "name": "f21",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "78355683904145007775288840",
-            "-119384708322041630321535990",
-            "-100920737167981116982294279",
-            "83826802691186830123887155",
-            "-142861875795075155563507787",
-            "-145798388286082279657253594",
-            "-25329913740178438718347749"
-          ]
-        },
-        {
-          "name": "f22",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            0,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "2728894406873273319876599",
-            "-26824077854062930008319059",
-            "59282342970725803527905051",
-            "-102858143095488614827267122",
-            "-86746033992194633744162126",
-            "151758737255322612002715481",
-            "-7882766239372883938103148"
-          ]
-        },
-        {
-          "name": "f23",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-79786812518583385435068134",
-            "-60631765880118763666388680",
-            "-6864681441231111204611249",
-            "13050926774682302829016376",
-            "-70829524080848069150697712",
-            "-82269446396753351313982437",
-            "-64035656687419899366647308"
-          ]
-        },
-        {
-          "name": "f24",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "3955725931924121460907631531",
-            "-21274213085809772733586643935",
-            "-12093030231048856037056919233",
-            "1591415318816864321772119800",
-            "15686477033795236409156083537",
-            "35545421326820517900037881163",
-            "22425766368694890650567525141"
-          ]
-        },
-        {
-          "name": "f25",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "29018707613584242707294878326",
-            "17791141760763489912825555133",
-            "16531138885547875931113958621",
-            "-30513053118126324165999828125",
-            "15960846333545963474586001659",
-            "36917332355702074649716398096",
-            "-23564003365919469704120973292"
-          ]
-        },
-        {
-          "name": "f26",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "9213099815853647106837374141135",
-            "-2927321977169069503340222047887",
-            "6334956127042213437236348937051",
-            "421008034487997672364705982278",
-            "-8216535847017377542720322569325",
-            "9010595708854142678424220967811",
-            "4188490944857456828411574213081"
-          ]
-        },
-        {
-          "name": "f27",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-8400462145843923761912990483225",
-            "-9532520685799568447195818877630",
-            "6941627626857671447061360399506",
-            "-1279966284289260405732779325351",
-            "1498483597016609631513149683949",
-            "-1529118713555007787773854671666",
-            "8107117611898163619507416633696"
-          ]
-        },
-        {
-          "name": "f28",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "-2915756808824546582752189119657",
-            "689849096940399949037495607644",
-            "6949792968358403147068782203692",
-            "-9209851065708847333103701669232",
-            "-9846735769797668003358129310662",
-            "-828325389508874726576445634733",
-            "-548287410953591263203641276502"
-          ]
-        },
-        {
-          "name": "f29",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "2329344668638312257280241058269271",
-            "-752344765844397811032529707212356",
-            "147451832276294483763966453842451",
-            "2043297198679260475716881353145488",
-            "-1700852196536476328772217191241342",
-            "395231723125832272118398102832808",
-            "1021064751191444192162606435133102"
-          ]
-        },
-        {
-          "name": "f30",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "1059349538643979619795009909676873",
-            "1077517333972932491341963859241254",
-            "-1269329697335528941024149247301687",
-            "1492334714292199711015081732157237",
-            "1164940978846205931486582828955479",
-            "1226701026828005364221866490976254",
-            "-2246642220670050837117291416085961"
-          ]
-        },
-        {
-          "name": "f31",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            0,
-            0,
-            0,
-            0
-          ],
-          "DATA": [
-            "-453427466718461089284644212936857043",
-            "401968647744354893846373970134628607",
-            "296564973980089833086319484394180932",
-            "300255396607176659100995955356045655",
-            "-128210286701929565620215086982795175",
-            "-76575845460847329054975157675278272",
-            "150610860039362064995272346289079343"
-          ]
-        },
-        {
-          "name": "f32",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "421046835537097379208584523685561537",
-            "-12336249922904854000276177315936166",
-            "-643245709818789885569562430725515554",
-            "28125849177083902136768190312091645",
-            "-144070901322770869842559076535428168",
-            "404632889001498427711768281303970316",
-            "484413224979251839912542328701460679"
-          ]
-        },
-        {
-          "name": "f33",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-74076465584946023556734876157740408561",
-            "48775465105440153520372738047296715437",
-            "-27256511570535798216063178902711504522",
-            "17556306662405983159878112274642973563",
-            "-100278707484244689483962841662232621044",
-            "61342616787415403733699116416597761322",
-            "65469616326882669620827062523697818819"
-          ]
-        },
-        {
-          "name": "f34",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            0,
-            0,
-            0,
-            0
-          ],
-          "DATA": [
-            "160988666415816646705442929165000844171",
-            "-150079614246235114587531344502672720143",
-            "130458754430497218349443163826152965786",
-            "162593370774960128510764087469483971628",
-            "100335479568560073327353331226427021396",
-            "154469868317889131666703500879972187378",
-            "109851602321776291877341624608658829262"
-          ]
-        },
-        {
-          "name": "f35",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "90507703396950205824782315866890653146",
-            "33101701517846982239639071710846694736",
-            "-162364169137146690363873495754205930932",
-            "6552361136224904568043204720462599330",
-            "55934073603932057089039430724186991179",
-            "-40518421289171987097923903446970623529",
-            "21964873705187790865866750301783750442"
-          ]
-        }
-      ]
-    },
-    {
-      "count": 10,
-      "columns": [
-        {
-          "name": "f0",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-4326",
-            "-4742",
-            "24127",
-            "-28455",
-            "-25093",
-            "18893",
-            "6010",
-            "18847",
-            "-29502",
-            "32119"
-          ]
-        },
-        {
-          "name": "f1",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-4350",
-            "6406",
-            "-22249",
-            "-3026",
-            "-27892",
-            "12582",
-            "-26038",
-            "-26810",
-            "-10412",
-            "12059"
-          ]
-        },
-        {
-          "name": "f2",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-5437050",
-            "-2975935",
-            "-2708551",
-            "5905122",
-            "7598831",
-            "1837938",
-            "3161296",
-            "4144482",
-            "7971237",
-            "3812932"
-          ]
-        },
-        {
-          "name": "f3",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            0,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "8307058",
-            "5437059",
-            "7848415",
-            "210888",
-            "-7438002",
-            "1251661",
-            "1558298",
-            "-8295786",
-            "-1337540",
-            "4329127"
-          ]
-        },
-        {
-          "name": "f4",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "2143281149",
-            "1036205716",
-            "-483960365",
-            "464783350",
-            "44702295",
-            "1500496151",
-            "2007499481",
-            "-1980167635",
-            "-491561248",
-            "137927958"
-          ]
-        },
-        {
-          "name": "f5",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "385115557",
-            "-468314229",
-            "-1806182189",
-            "826968059",
-            "1973820915",
-            "-1678321968",
-            "282860534",
-            "-429116070",
-            "-485370474",
-            "-1175964348"
-          ]
-        },
-        {
-          "name": "f6",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-1660401943",
-            "858637970",
-            "1954585251",
-            "896247082",
-            "-1324508546",
-            "1856733620",
-            "-508486488",
-            "-2144890404",
-            "-595464217",
-            "-541883970"
-          ]
-        },
-        {
-          "name": "f7",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "-475292084638",
-            "-151527930749",
-            "-136244496864",
-            "475724641544",
-            "490808185901",
-            "-380524076056",
-            "347742169602",
-            "-208359763265",
-            "500407573029",
-            "-390649746248"
-          ]
-        },
-        {
-          "name": "f8",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            0,
-            0,
-            1,
-            0,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "-463878565883",
-            "-440350084389",
-            "-489309228695",
-            "403236358497",
-            "22420180364",
-            "461550541832",
-            "166802945594",
-            "-75395045157",
-            "349097107753",
-            "-31923850582"
-          ]
-        },
-        {
-          "name": "f9",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "13351341741346",
-            "132672835377977",
-            "-104121852651737",
-            "-116578873884859",
-            "-93368786381550",
-            "-130866514927019",
-            "-130245922532357",
-            "119422720388976",
-            "39514069962031",
-            "129449354245745"
-          ]
-        },
-        {
-          "name": "f10",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "117562734639864",
-            "-69335041870511",
-            "88065868490266",
-            "-88825080775852",
-            "-33000609471354",
-            "45253821354104",
-            "-19572766862990",
-            "-20998291433421",
-            "-138710717300131",
-            "105346771642837"
-          ]
-        },
-        {
-          "name": "f11",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "23613726765605",
-            "-122540956874454",
-            "43298244246934",
-            "38914627030131",
-            "-96822287655033",
-            "-117561922155895",
-            "-116901550232631",
-            "-33238646153535",
-            "87255230245952",
-            "-103811103056354"
-          ]
-        },
-        {
-          "name": "f12",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "14326292611678389",
-            "11679216137082255",
-            "30602968082225040",
-            "-35083463698552038",
-            "13793460528237121",
-            "29475754221579432",
-            "11956818381525770",
-            "25264515687485849",
-            "-9442305734363215",
-            "32709604673194504"
-          ]
-        },
-        {
-          "name": "f13",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "10752127983223695",
-            "-17671172332493417",
-            "23204581064269137",
-            "25912317958066911",
-            "-6684281122027701",
-            "-5393708227406178",
-            "-705876736520327",
-            "-8271615012961841",
-            "-2359308340581953",
-            "-10178212042804721"
-          ]
-        },
-        {
-          "name": "f14",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            1,
-            0,
-            0,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-785858494820999025",
-            "-1596606269767712061",
-            "-1418974761760001783",
-            "5406463592868106826",
-            "8383949914321587435",
-            "-8953833508549969767",
-            "7221522693757617783",
-            "7517732845880583602",
-            "-919064621805709634",
-            "-5495596733594283265"
-          ]
-        },
-        {
-          "name": "f15",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-8483275882311463690",
-            "8973190785377545539",
-            "5542339023127975216",
-            "-1516710247958149161",
-            "-6505879801430717005",
-            "2140391223240971073",
-            "-582269641777058598",
-            "-5184242226957355683",
-            "5206992463328246120",
-            "-6079741749013867959"
-          ]
-        },
-        {
-          "name": "f16",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "1593226799266187966947",
-            "-1812161934828717889262",
-            "-2122590131716118763227",
-            "-1027437168205961683538",
-            "1846540008444089681004",
-            "-1156292451907349126710",
-            "-1957435108076953499486",
-            "-950458759560036353191",
-            "-1125440584483224915212",
-            "2247219347276423832973"
-          ]
-        },
-        {
-          "name": "f17",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-635230024201890240957",
-            "180963226287776474583",
-            "-1878152791054730240606",
-            "-1904422293952570992101",
-            "1052835439226626094544",
-            "-1858821189756108906198",
-            "1853668161015001150059",
-            "511899624049166705494",
-            "610084498432376101375",
-            "-1029828641365425116105"
-          ]
-        },
-        {
-          "name": "f18",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            0,
-            1,
-            0,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "1885658882244978382784",
-            "565125911580367694969",
-            "-1652560178353597573029",
-            "2133173545067165238416",
-            "-486940365592293262895",
-            "-2060709217874894710408",
-            "1586493850244052619259",
-            "2178470762303491581080",
-            "-1293832362817784170345",
-            "-1858110114261315174517"
-          ]
-        },
-        {
-          "name": "f19",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-369225411953896208185462",
-            "-432125813663454581808312",
-            "-144256737060926287909147",
-            "437531850902150565537267",
-            "-135152047695882388519227",
-            "200989985671128391254839",
-            "412064182108667107703143",
-            "155783836458581176854752",
-            "-227172778832455276260133",
-            "212767460062823457968604"
-          ]
-        },
-        {
-          "name": "f20",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "403503153546865033134324",
-            "286440584824595490396339",
-            "191634056603308191156299",
-            "530416421236246257260161",
-            "-317587706869080743041040",
-            "-171530648886789759204250",
-            "222798032246148363279215",
-            "72219173848107712300756",
-            "-281960273386152702267354",
-            "-495494776734887426338667"
-          ]
-        },
-        {
-          "name": "f21",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "122137352759155073399221518",
-            "-78109201384474477075454853",
-            "-119068241308521301444272353",
-            "-108756965225823937786847241",
-            "-15428358494009873779245484",
-            "19614899796809003767318009",
-            "-115295286224016867112586811",
-            "-8633742807795124483538483",
-            "-150410722825301769338372723",
-            "124250719445278286695627796"
-          ]
-        },
-        {
-          "name": "f22",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-99183180993590467578076798",
-            "-36808132925962854413539892",
-            "-125679607816807665333773367",
-            "-136622111609623584591267546",
-            "-17266188400280841341905013",
-            "-71769330665427864647331885",
-            "-145814031084605056753603019",
-            "-125056942233093610827766296",
-            "9249166279701860198655516",
-            "126878824313892664602568663"
-          ]
-        },
-        {
-          "name": "f23",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "69744825329510933906746340",
-            "-30122586028854363823971329",
-            "22212149164944035060004867",
-            "-25973928380545009708020589",
-            "50567555142199657207098957",
-            "-40021098669624532176421808",
-            "-33330934345289112585983041",
-            "-124012214741805393399462125",
-            "-138226312121129798637686830",
-            "2273005251425309465489619"
-          ]
-        },
-        {
-          "name": "f24",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "26842282533862498382939405997",
-            "17787994211657168910392131484",
-            "7523952681100831594207033636",
-            "18257283108725825658512455316",
-            "-15469531382712880431453128966",
-            "-5009369258227088832031787745",
-            "-35598690213497115663830724084",
-            "22246953732785347649151349772",
-            "9330915514049730010034508310",
-            "-26800441656550330938009306707"
-          ]
-        },
-        {
-          "name": "f25",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-36331212919615640889272411104",
-            "36192450254329815033783240839",
-            "37749265523377026412760210674",
-            "1776086174556031952147671519",
-            "15362218353534633991283119679",
-            "34413146026389235392142970662",
-            "14292777901465553588604435837",
-            "-17090082469158479303098433453",
-            "18688801441734020761551793736",
-            "-23859805527670127110643731069"
-          ]
-        },
-        {
-          "name": "f26",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "2220103303731789573334199701663",
-            "6237111177014715580774407162796",
-            "-6338471790385983381447055194941",
-            "2250388387229112362557012222382",
-            "-3919498269201660475165992143963",
-            "-1799934177913445118487985955729",
-            "-2323304519983949754039480799426",
-            "-1301131662575316944670816531905",
-            "-4265160933868529886993472786495",
-            "-4292953046034527295812111533975"
-          ]
-        },
-        {
-          "name": "f27",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            0,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "-3484925219213002239805523064883",
-            "6528552859477682991951516050785",
-            "-765962107198382441283095455373",
-            "-1397137479831402026027607048248",
-            "3961682476407229854971103624981",
-            "5975195883181022585096451556775",
-            "-9676800477604513004081790052790",
-            "-5703219619447316623534739055181",
-            "6129137758274201914352345247446",
-            "771936840958552475578155475366"
-          ]
-        },
-        {
-          "name": "f28",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "7726836802487400261012835642739",
-            "-2899737712900797452349686903367",
-            "-4699841764135292946953397202917",
-            "7418179251651709234455608105088",
-            "8120397131521471010221697932900",
-            "6526213244777384369820937871939",
-            "-3774567743925527043971960901589",
-            "-8873736034246294395374537274419",
-            "488289844645088756680442972516",
-            "-2388144175550709792515001316277"
-          ]
-        },
-        {
-          "name": "f29",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-47477340538926222211104914237698",
-            "320639937921939986635581989855928",
-            "861736289842228881011999521004060",
-            "-1205256013177842982651274099236361",
-            "-39643995957385740378372042150003",
-            "-1210790336528949301250817830946444",
-            "1182423445445031739603473987778350",
-            "-640337212803879852231150942968813",
-            "-567587726713873060654848198630762",
-            "280641360776654488558911759135115"
-          ]
-        },
-        {
-          "name": "f30",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-866285575875610221959113146637304",
-            "2344770413948196340883160931087052",
-            "1889857580890564350097047877514790",
-            "-2568726710340227178480991779371723",
-            "-2485213596075291138559974328614959",
-            "-62378076407090361966505761644273",
-            "-2113433394374145039336066697772466",
-            "-2389429054181728925012886258903305",
-            "-2525262726376576108925615177598533",
-            "-604526323558062739370715910785200"
-          ]
-        },
-        {
-          "name": "f31",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            1,
-            1,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-411175611485478784812936027850047826",
-            "457804241550769132910430204506150717",
-            "1549607974636127184352732575182742",
-            "390025168459008888512891394303962448",
-            "-490664276836807094735857019444661312",
-            "-450640457243898505844766529165221166",
-            "-553339316764622659577593048830066531",
-            "19986969431028149966548563329309114",
-            "512190967497020823088167980283378342",
-            "-587244163212803971173675798133411979"
-          ]
-        },
-        {
-          "name": "f32",
-          "count": 10,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "246512063483920590600659938977719645",
-            "483324212708049237205831199756205159",
-            "195186088040813738593870463346937474",
-            "-331572691964567402179138728488891845",
-            "466186594735574058231047101478621597",
-            "80512186957271594252263140708656889",
-            "-621018503026533776126752549530840325",
-            "-484705410290628923401085082564299400",
-            "-322852670767952007714303037031011136",
-            "306969564371703225579907932616560177"
-          ]
-        },
-        {
-          "name": "f33",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            0,
-            0,
-            1,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "52513592802194657300546985265901321701",
-            "32396354459564619210286503831104805894",
-            "-152800597997887046528471654246818061800",
-            "16886738551251131147537573820997523099",
-            "-108529890244550965992486427743581407764",
-            "-107805100101953838884357149548341742683",
-            "-152428185005223769908895650096091179800",
-            "38697638049920608411728929261890502602",
-            "-53183835204559125450983905289890638199",
-            "-25198915522830164616311861007733226015"
-          ]
-        },
-        {
-          "name": "f34",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "-29473659368749332611063061562898178665",
-            "-64181392425515474831866849063339846755",
-            "90557155313004461723928910484647118568",
-            "69982410088115917703700629187142714952",
-            "137281427883875761429267863353473899975",
-            "66221749426872522877433672036305282198",
-            "139898216716793863940328165403822713329",
-            "-17999019985160863257629297875199464798",
-            "74582888771441381839018581558057879959",
-            "153431389525940565745457493557728519648"
-          ]
-        },
-        {
-          "name": "f35",
-          "count": 10,
-          "VALIDITY": [
-            0,
-            1,
-            0,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "132130437221517226508875210221998086214",
-            "-53955540660976668373224815268780394481",
-            "-94671974318719360571824313264981742616",
-            "75940549933612625873217731049581943788",
-            "-58970299111050400318748873232294460623",
-            "106861808580585226388022589188291458328",
-            "151810935532684619700572858784852122078",
-            "147244409300271119764509412340177481716",
-            "139138676242120433411406371456479693081",
-            "58927563101546159870009467050083778558"
-          ]
-        }
-      ]
-    },
-    {
-      "count": 7,
-      "columns": [
-        {
-          "name": "f0",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            0
-          ],
-          "DATA": [
-            "25114",
-            "-12925",
-            "-1074",
-            "28646",
-            "-16569",
-            "-16770",
-            "-27151"
-          ]
-        },
-        {
-          "name": "f1",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            0,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "26533",
-            "12085",
-            "25312",
-            "23445",
-            "13356",
-            "-2369",
-            "-29785"
-          ]
-        },
-        {
-          "name": "f2",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            0,
-            1,
-            1,
-            0,
-            1
-          ],
-          "DATA": [
-            "-4380859",
-            "-157657",
-            "-4200925",
-            "6061632",
-            "1972423",
-            "-3016747",
-            "2864606"
-          ]
-        },
-        {
-          "name": "f3",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-7045373",
-            "-7371800",
-            "1427921",
-            "-7601068",
-            "2525157",
-            "-6433403",
-            "-2477014"
-          ]
-        },
-        {
-          "name": "f4",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "-141345816",
-            "1543059503",
-            "1211501603",
-            "369888761",
-            "894246844",
-            "736166799",
-            "409408642"
-          ]
-        },
-        {
-          "name": "f5",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            0,
-            0
-          ],
-          "DATA": [
-            "-1553193945",
-            "1491657644",
-            "376067040",
-            "-1100453955",
-            "1763626627",
-            "1237652218",
-            "-1680624462"
-          ]
-        },
-        {
-          "name": "f6",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            1,
-            0,
-            1,
-            1,
-            0
-          ],
-          "DATA": [
-            "102290167",
-            "1044278987",
-            "501196467",
-            "32275276",
-            "-1384353672",
-            "594927697",
-            "-1482240101"
-          ]
-        },
-        {
-          "name": "f7",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            0,
-            1,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "431716525885",
-            "-396649752792",
-            "-252538061544",
-            "148825582538",
-            "536082535524",
-            "-135755487371",
-            "425356768774"
-          ]
-        },
-        {
-          "name": "f8",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            0,
-            1,
-            1
-          ],
-          "DATA": [
-            "137916147840",
-            "21393720222",
-            "-325824439981",
-            "278743651518",
-            "-49501503483",
-            "-419736800011",
-            "503854890502"
-          ]
-        },
-        {
-          "name": "f9",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "-137642247974694",
-            "-126053382015273",
-            "32896127803947",
-            "17450989064436",
-            "-120690899531858",
-            "109217312768874",
-            "-79259001409277"
-          ]
-        },
-        {
-          "name": "f10",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            1,
-            1,
-            1,
-            1
-          ],
-          "DATA": [
-            "114207744387703",
-            "-79306413416767",
-            "139536513022560",
-            "-98423452444221",
-            "78791182271051",
-            "-35285009371089",
-            "41940974948329"
-          ]
-        },
-        {
-          "name": "f11",
-          "count": 7,
-          "VALIDITY": [
-            1,
-            1,
-            1,
-            0,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "-27264096490547",
-            "136834819442083",
-            "-27088394932437",
-            "-98987047379901",
-            "125550952272750",
-            "-104804800493942",
-            "8768888141448"
-          ]
-        },
-        {
-          "name": "f12",
-          "count": 7,
-          "VALIDITY": [
-            0,
-            0,
-            0,
-            0,
-            0,
-            0,
-            1
-          ],
-          "DATA": [
-            "23677368293754508",
-            "-30279797614274598",
... 35855 lines suppressed ...
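
The removed data above follows Arrow's JSON integration-test format: each
record batch object carries a "count" and a list of "columns", and every
column pairs a VALIDITY array of 0/1 flags with a DATA array of the same
length; wide values such as the decimal columns above are encoded as JSON
strings because they can exceed the i64 range. As a minimal illustrative
sketch (not code from this repository), one such column can be decoded with
only the serde_json crate; decode_decimal_column is a hypothetical helper,
and i128 suffices only for decimals up to 128 bits:

    // Sketch: decode one column object of the JSON integration format
    // into Vec<Option<i128>>. Null slots (VALIDITY == 0) still occupy a
    // placeholder entry in DATA, so VALIDITY and DATA are zipped 1:1.
    use serde_json::Value;

    fn decode_decimal_column(col: &Value) -> Option<Vec<Option<i128>>> {
        let validity = col.get("VALIDITY")?.as_array()?;
        let data = col.get("DATA")?.as_array()?;
        validity
            .iter()
            .zip(data.iter())
            .map(|(valid, datum)| {
                if valid.as_i64()? == 1 {
                    // Values are strings so they can hold >64-bit integers.
                    let n: i128 = datum.as_str()?.parse().ok()?;
                    Some(Some(n))
                } else {
                    Some(None)
                }
            })
            .collect()
    }

    fn main() {
        // A shortened column in the same shape as "f18" above.
        let col: Value = serde_json::from_str(
            r#"{"name": "f18", "count": 3,
                "VALIDITY": [1, 0, 1],
                "DATA": ["1885658882244978382784", "0",
                         "-1652560178353597573029"]}"#,
        )
        .unwrap();
        println!("{:?}", decode_decimal_column(&col));
    }

The main() call doubles as a usage example: it prints Some([Some(...), None,
Some(...)]), mirroring how the VALIDITY flags mask the DATA entries.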

[arrow-rs] 11/14: Removed cpp.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 92a3eece007f2128bf88add1767c2dded7c317c9
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:22:57 2021 +0000

    Removed cpp.
---
 cpp/.gitignore                                     |   43 -
 cpp/Brewfile                                       |   44 -
 cpp/CHANGELOG_PARQUET.md                           |  501 --
 cpp/CMakeLists.txt                                 |  925 ---
 cpp/CMakeSettings.json                             |   21 -
 cpp/README.md                                      |   34 -
 cpp/apidoc/.gitignore                              |    1 -
 cpp/apidoc/Doxyfile                                | 2551 -------
 cpp/apidoc/HDFS.md                                 |   83 -
 cpp/apidoc/footer.html                             |   31 -
 cpp/apidoc/tutorials/plasma.md                     |  450 --
 cpp/apidoc/tutorials/tensor_to_py.md               |  127 -
 cpp/build-support/asan_symbolize.py                |  368 -
 cpp/build-support/build-lz4-lib.sh                 |   25 -
 cpp/build-support/build-zstd-lib.sh                |   25 -
 cpp/build-support/cpplint.py                       | 6477 ----------------
 cpp/build-support/fuzzing/generate_corpuses.sh     |   52 -
 cpp/build-support/fuzzing/pack_corpus.py           |   54 -
 cpp/build-support/get-upstream-commit.sh           |   25 -
 cpp/build-support/iwyu/iwyu-filter.awk             |   96 -
 cpp/build-support/iwyu/iwyu.sh                     |   90 -
 cpp/build-support/iwyu/iwyu_tool.py                |  280 -
 cpp/build-support/iwyu/mappings/arrow-misc.imp     |   61 -
 .../iwyu/mappings/boost-all-private.imp            | 4166 ----------
 cpp/build-support/iwyu/mappings/boost-all.imp      | 5679 --------------
 cpp/build-support/iwyu/mappings/boost-extra.imp    |   23 -
 cpp/build-support/iwyu/mappings/gflags.imp         |   20 -
 cpp/build-support/iwyu/mappings/glog.imp           |   27 -
 cpp/build-support/iwyu/mappings/gmock.imp          |   23 -
 cpp/build-support/iwyu/mappings/gtest.imp          |   26 -
 cpp/build-support/lint_cpp_cli.py                  |  128 -
 cpp/build-support/lint_exclusions.txt              |   12 -
 cpp/build-support/lintutils.py                     |  109 -
 cpp/build-support/lsan-suppressions.txt            |   21 -
 cpp/build-support/run-infer.sh                     |   48 -
 cpp/build-support/run-test.sh                      |  237 -
 cpp/build-support/run_clang_format.py              |  137 -
 cpp/build-support/run_clang_tidy.py                |  124 -
 cpp/build-support/run_cpplint.py                   |  132 -
 cpp/build-support/sanitizer-disallowed-entries.txt |   25 -
 cpp/build-support/stacktrace_addr2line.pl          |   92 -
 cpp/build-support/trim-boost.sh                    |   72 -
 cpp/build-support/tsan-suppressions.txt            |   19 -
 cpp/build-support/ubsan-suppressions.txt           |   16 -
 cpp/build-support/update-flatbuffers.sh            |   41 -
 cpp/build-support/update-thrift.sh                 |   23 -
 cpp/build-support/vendor-flatbuffers.sh            |   31 -
 cpp/cmake_modules/BuildUtils.cmake                 |  947 ---
 cpp/cmake_modules/DefineOptions.cmake              |  579 --
 cpp/cmake_modules/Find-c-aresAlt.cmake             |   71 -
 cpp/cmake_modules/FindArrow.cmake                  |  438 --
 cpp/cmake_modules/FindArrowCUDA.cmake              |   91 -
 cpp/cmake_modules/FindArrowDataset.cmake           |   91 -
 cpp/cmake_modules/FindArrowFlight.cmake            |   92 -
 cpp/cmake_modules/FindArrowFlightTesting.cmake     |  103 -
 cpp/cmake_modules/FindArrowPython.cmake            |   90 -
 cpp/cmake_modules/FindArrowPythonFlight.cmake      |   99 -
 cpp/cmake_modules/FindArrowTesting.cmake           |   91 -
 cpp/cmake_modules/FindBoostAlt.cmake               |   63 -
 cpp/cmake_modules/FindBrotli.cmake                 |  133 -
 cpp/cmake_modules/FindClangTools.cmake             |  103 -
 cpp/cmake_modules/FindGLOG.cmake                   |   54 -
 cpp/cmake_modules/FindGandiva.cmake                |   97 -
 cpp/cmake_modules/FindInferTools.cmake             |   47 -
 cpp/cmake_modules/FindLLVMAlt.cmake                |   81 -
 cpp/cmake_modules/FindLz4.cmake                    |   85 -
 cpp/cmake_modules/FindNumPy.cmake                  |   96 -
 cpp/cmake_modules/FindORC.cmake                    |   53 -
 cpp/cmake_modules/FindOpenSSLAlt.cmake             |   54 -
 cpp/cmake_modules/FindParquet.cmake                |  130 -
 cpp/cmake_modules/FindPlasma.cmake                 |  106 -
 cpp/cmake_modules/FindPython3Alt.cmake             |   99 -
 cpp/cmake_modules/FindPythonLibsNew.cmake          |  267 -
 cpp/cmake_modules/FindRapidJSONAlt.cmake           |   74 -
 cpp/cmake_modules/FindSnappy.cmake                 |   63 -
 cpp/cmake_modules/FindThrift.cmake                 |  142 -
 cpp/cmake_modules/FindgRPCAlt.cmake                |   82 -
 cpp/cmake_modules/FindgflagsAlt.cmake              |   59 -
 cpp/cmake_modules/Findjemalloc.cmake               |   94 -
 cpp/cmake_modules/Findre2Alt.cmake                 |   85 -
 cpp/cmake_modules/Findutf8proc.cmake               |   70 -
 cpp/cmake_modules/Findzstd.cmake                   |   89 -
 cpp/cmake_modules/SetupCxxFlags.cmake              |  634 --
 cpp/cmake_modules/ThirdpartyToolchain.cmake        | 2959 --------
 cpp/cmake_modules/UseCython.cmake                  |  184 -
 cpp/cmake_modules/Usevcpkg.cmake                   |  217 -
 cpp/cmake_modules/san-config.cmake                 |  133 -
 cpp/examples/arrow/CMakeLists.txt                  |   36 -
 .../arrow/dataset_documentation_example.cc         |  355 -
 cpp/examples/arrow/dataset_parquet_scan_example.cc |  188 -
 cpp/examples/arrow/row_wise_conversion_example.cc  |  190 -
 cpp/examples/minimal_build/.gitignore              |   18 -
 cpp/examples/minimal_build/CMakeLists.txt          |   40 -
 cpp/examples/minimal_build/README.md               |   88 -
 cpp/examples/minimal_build/build_arrow.sh          |   35 -
 cpp/examples/minimal_build/build_example.sh        |   27 -
 cpp/examples/minimal_build/docker-compose.yml      |   51 -
 cpp/examples/minimal_build/example.cc              |   73 -
 cpp/examples/minimal_build/minimal.dockerfile      |   26 -
 cpp/examples/minimal_build/run.sh                  |   48 -
 cpp/examples/minimal_build/run_static.bat          |   88 -
 cpp/examples/minimal_build/run_static.sh           |   90 -
 .../minimal_build/system_dependency.dockerfile     |   43 -
 cpp/examples/minimal_build/test.csv                |    3 -
 cpp/examples/parquet/CMakeLists.txt                |   78 -
 .../low_level_api/encryption_reader_writer.cc      |  454 --
 .../encryption_reader_writer_all_crypto_options.cc |  664 --
 .../parquet/low_level_api/reader_writer.cc         |  413 -
 cpp/examples/parquet/low_level_api/reader_writer.h |   71 -
 .../parquet/low_level_api/reader_writer2.cc        |  434 --
 cpp/examples/parquet/parquet_arrow/CMakeLists.txt  |   42 -
 cpp/examples/parquet/parquet_arrow/README.md       |   20 -
 .../parquet/parquet_arrow/reader_writer.cc         |  144 -
 .../parquet_stream_api/stream_reader_writer.cc     |  326 -
 cpp/src/arrow/ArrowConfig.cmake.in                 |   92 -
 cpp/src/arrow/ArrowTestingConfig.cmake.in          |   36 -
 cpp/src/arrow/CMakeLists.txt                       |  708 --
 cpp/src/arrow/adapters/orc/CMakeLists.txt          |   61 -
 cpp/src/arrow/adapters/orc/adapter.cc              |  478 --
 cpp/src/arrow/adapters/orc/adapter.h               |  149 -
 cpp/src/arrow/adapters/orc/adapter_test.cc         |  160 -
 cpp/src/arrow/adapters/orc/adapter_util.cc         |  430 --
 cpp/src/arrow/adapters/orc/adapter_util.h          |   41 -
 cpp/src/arrow/adapters/orc/arrow-orc.pc.in         |   24 -
 cpp/src/arrow/adapters/tensorflow/CMakeLists.txt   |   21 -
 .../adapters/tensorflow/arrow-tensorflow.pc.in     |   24 -
 cpp/src/arrow/adapters/tensorflow/convert.h        |  128 -
 cpp/src/arrow/api.h                                |   44 -
 cpp/src/arrow/array.h                              |   32 -
 cpp/src/arrow/array/CMakeLists.txt                 |   26 -
 cpp/src/arrow/array/README.md                      |   20 -
 cpp/src/arrow/array/array_base.cc                  |  308 -
 cpp/src/arrow/array/array_base.h                   |  258 -
 cpp/src/arrow/array/array_binary.cc                |  108 -
 cpp/src/arrow/array/array_binary.h                 |  248 -
 cpp/src/arrow/array/array_binary_test.cc           |  835 --
 cpp/src/arrow/array/array_decimal.cc               |   63 -
 cpp/src/arrow/array/array_decimal.h                |   66 -
 cpp/src/arrow/array/array_dict.cc                  |  442 --
 cpp/src/arrow/array/array_dict.h                   |  180 -
 cpp/src/arrow/array/array_dict_test.cc             | 1678 -----
 cpp/src/arrow/array/array_list_test.cc             | 1134 ---
 cpp/src/arrow/array/array_nested.cc                |  757 --
 cpp/src/arrow/array/array_nested.h                 |  523 --
 cpp/src/arrow/array/array_primitive.cc             |   99 -
 cpp/src/arrow/array/array_primitive.h              |  135 -
 cpp/src/arrow/array/array_struct_test.cc           |  610 --
 cpp/src/arrow/array/array_test.cc                  | 2948 --------
 cpp/src/arrow/array/array_union_test.cc            |  582 --
 cpp/src/arrow/array/array_view_test.cc             |  441 --
 cpp/src/arrow/array/builder_adaptive.cc            |  380 -
 cpp/src/arrow/array/builder_adaptive.h             |  203 -
 cpp/src/arrow/array/builder_base.cc                |  136 -
 cpp/src/arrow/array/builder_base.h                 |  270 -
 cpp/src/arrow/array/builder_binary.cc              |  199 -
 cpp/src/arrow/array/builder_binary.h               |  632 --
 cpp/src/arrow/array/builder_decimal.cc             |  105 -
 cpp/src/arrow/array/builder_decimal.h              |   92 -
 cpp/src/arrow/array/builder_dict.cc                |  204 -
 cpp/src/arrow/array/builder_dict.h                 |  571 --
 cpp/src/arrow/array/builder_nested.cc              |  294 -
 cpp/src/arrow/array/builder_nested.h               |  482 --
 cpp/src/arrow/array/builder_primitive.cc           |  138 -
 cpp/src/arrow/array/builder_primitive.h            |  478 --
 cpp/src/arrow/array/builder_time.h                 |   43 -
 cpp/src/arrow/array/builder_union.cc               |  121 -
 cpp/src/arrow/array/builder_union.h                |  235 -
 cpp/src/arrow/array/concatenate.cc                 |  490 --
 cpp/src/arrow/array/concatenate.h                  |   42 -
 cpp/src/arrow/array/concatenate_test.cc            |  386 -
 cpp/src/arrow/array/data.cc                        |  333 -
 cpp/src/arrow/array/data.h                         |  260 -
 cpp/src/arrow/array/dict_internal.h                |  193 -
 cpp/src/arrow/array/diff.cc                        |  784 --
 cpp/src/arrow/array/diff.h                         |   76 -
 cpp/src/arrow/array/diff_test.cc                   |  688 --
 cpp/src/arrow/array/util.cc                        |  745 --
 cpp/src/arrow/array/util.h                         |   78 -
 cpp/src/arrow/array/validate.cc                    |  657 --
 cpp/src/arrow/array/validate.h                     |   55 -
 cpp/src/arrow/arrow-config.cmake                   |   26 -
 cpp/src/arrow/arrow-testing.pc.in                  |   27 -
 cpp/src/arrow/arrow.pc.in                          |   29 -
 cpp/src/arrow/buffer.cc                            |  313 -
 cpp/src/arrow/buffer.h                             |  508 --
 cpp/src/arrow/buffer_builder.h                     |  419 --
 cpp/src/arrow/buffer_test.cc                       |  852 ---
 cpp/src/arrow/builder.cc                           |  222 -
 cpp/src/arrow/builder.h                            |   32 -
 cpp/src/arrow/builder_benchmark.cc                 |  453 --
 cpp/src/arrow/c/CMakeLists.txt                     |   22 -
 cpp/src/arrow/c/abi.h                              |  103 -
 cpp/src/arrow/c/bridge.cc                          | 1712 -----
 cpp/src/arrow/c/bridge.h                           |  197 -
 cpp/src/arrow/c/bridge_benchmark.cc                |  159 -
 cpp/src/arrow/c/bridge_test.cc                     | 2946 --------
 cpp/src/arrow/c/helpers.h                          |  117 -
 cpp/src/arrow/c/util_internal.h                    |   85 -
 cpp/src/arrow/chunked_array.cc                     |  267 -
 cpp/src/arrow/chunked_array.h                      |  248 -
 cpp/src/arrow/chunked_array_test.cc                |  244 -
 cpp/src/arrow/compare.cc                           | 1304 ----
 cpp/src/arrow/compare.h                            |  133 -
 cpp/src/arrow/compare_benchmark.cc                 |  164 -
 cpp/src/arrow/compute/CMakeLists.txt               |   70 -
 cpp/src/arrow/compute/README.md                    |   58 -
 cpp/src/arrow/compute/api.h                        |   35 -
 cpp/src/arrow/compute/api_aggregate.cc             |   77 -
 cpp/src/arrow/compute/api_aggregate.h              |  407 -
 cpp/src/arrow/compute/api_scalar.cc                |  152 -
 cpp/src/arrow/compute/api_scalar.h                 |  434 --
 cpp/src/arrow/compute/api_vector.cc                |  162 -
 cpp/src/arrow/compute/api_vector.h                 |  379 -
 cpp/src/arrow/compute/arrow-compute.pc.in          |   21 -
 cpp/src/arrow/compute/cast.cc                      |  245 -
 cpp/src/arrow/compute/cast.h                       |  171 -
 cpp/src/arrow/compute/cast_internal.h              |   42 -
 cpp/src/arrow/compute/exec.cc                      |  997 ---
 cpp/src/arrow/compute/exec.h                       |  242 -
 cpp/src/arrow/compute/exec_internal.h              |  142 -
 cpp/src/arrow/compute/exec_test.cc                 |  858 ---
 cpp/src/arrow/compute/function.cc                  |  295 -
 cpp/src/arrow/compute/function.h                   |  354 -
 cpp/src/arrow/compute/function_benchmark.cc        |  184 -
 cpp/src/arrow/compute/function_test.cc             |  234 -
 cpp/src/arrow/compute/kernel.cc                    |  473 --
 cpp/src/arrow/compute/kernel.h                     |  753 --
 cpp/src/arrow/compute/kernel_test.cc               |  503 --
 cpp/src/arrow/compute/kernels/CMakeLists.txt       |   67 -
 cpp/src/arrow/compute/kernels/aggregate_basic.cc   |  347 -
 .../arrow/compute/kernels/aggregate_basic_avx2.cc  |   77 -
 .../compute/kernels/aggregate_basic_avx512.cc      |   78 -
 .../compute/kernels/aggregate_basic_internal.h     |  397 -
 .../arrow/compute/kernels/aggregate_benchmark.cc   |  752 --
 cpp/src/arrow/compute/kernels/aggregate_internal.h |  165 -
 cpp/src/arrow/compute/kernels/aggregate_mode.cc    |  369 -
 .../arrow/compute/kernels/aggregate_quantile.cc    |  462 --
 cpp/src/arrow/compute/kernels/aggregate_tdigest.cc |  153 -
 cpp/src/arrow/compute/kernels/aggregate_test.cc    | 1743 -----
 cpp/src/arrow/compute/kernels/aggregate_var_std.cc |  284 -
 cpp/src/arrow/compute/kernels/codegen_internal.cc  |  327 -
 cpp/src/arrow/compute/kernels/codegen_internal.h   | 1258 ----
 cpp/src/arrow/compute/kernels/common.h             |   54 -
 cpp/src/arrow/compute/kernels/hash_aggregate.cc    | 1066 ---
 .../arrow/compute/kernels/hash_aggregate_test.cc   |  703 --
 cpp/src/arrow/compute/kernels/scalar_arithmetic.cc |  501 --
 .../compute/kernels/scalar_arithmetic_benchmark.cc |  159 -
 .../compute/kernels/scalar_arithmetic_test.cc      |  821 --
 cpp/src/arrow/compute/kernels/scalar_boolean.cc    |  503 --
 .../compute/kernels/scalar_boolean_benchmark.cc    |   59 -
 .../arrow/compute/kernels/scalar_boolean_test.cc   |  141 -
 .../arrow/compute/kernels/scalar_cast_benchmark.cc |  117 -
 .../arrow/compute/kernels/scalar_cast_boolean.cc   |   70 -
 .../arrow/compute/kernels/scalar_cast_internal.cc  |  279 -
 .../arrow/compute/kernels/scalar_cast_internal.h   |   88 -
 .../arrow/compute/kernels/scalar_cast_nested.cc    |  126 -
 .../arrow/compute/kernels/scalar_cast_numeric.cc   |  724 --
 .../arrow/compute/kernels/scalar_cast_string.cc    |  249 -
 .../arrow/compute/kernels/scalar_cast_temporal.cc  |  456 --
 cpp/src/arrow/compute/kernels/scalar_cast_test.cc  | 1879 -----
 cpp/src/arrow/compute/kernels/scalar_compare.cc    |  220 -
 .../compute/kernels/scalar_compare_benchmark.cc    |   80 -
 .../arrow/compute/kernels/scalar_compare_test.cc   |  656 --
 cpp/src/arrow/compute/kernels/scalar_fill_null.cc  |  242 -
 .../arrow/compute/kernels/scalar_fill_null_test.cc |  168 -
 cpp/src/arrow/compute/kernels/scalar_nested.cc     |  171 -
 .../arrow/compute/kernels/scalar_nested_test.cc    |  172 -
 cpp/src/arrow/compute/kernels/scalar_set_lookup.cc |  494 --
 .../compute/kernels/scalar_set_lookup_benchmark.cc |  143 -
 .../compute/kernels/scalar_set_lookup_test.cc      |  793 --
 cpp/src/arrow/compute/kernels/scalar_string.cc     | 2162 ------
 .../compute/kernels/scalar_string_benchmark.cc     |  123 -
 .../arrow/compute/kernels/scalar_string_test.cc    |  600 --
 cpp/src/arrow/compute/kernels/scalar_validity.cc   |  180 -
 .../arrow/compute/kernels/scalar_validity_test.cc  |  128 -
 cpp/src/arrow/compute/kernels/test_util.cc         |  200 -
 cpp/src/arrow/compute/kernels/test_util.h          |  152 -
 cpp/src/arrow/compute/kernels/util_internal.cc     |   85 -
 cpp/src/arrow/compute/kernels/util_internal.h      |  157 -
 cpp/src/arrow/compute/kernels/vector_hash.cc       |  775 --
 .../arrow/compute/kernels/vector_hash_benchmark.cc |  250 -
 cpp/src/arrow/compute/kernels/vector_hash_test.cc  |  750 --
 cpp/src/arrow/compute/kernels/vector_nested.cc     |  104 -
 .../arrow/compute/kernels/vector_nested_test.cc    |   55 -
 .../compute/kernels/vector_partition_benchmark.cc  |   59 -
 cpp/src/arrow/compute/kernels/vector_selection.cc  | 2181 ------
 .../compute/kernels/vector_selection_benchmark.cc  |  354 -
 .../arrow/compute/kernels/vector_selection_test.cc | 1721 -----
 cpp/src/arrow/compute/kernels/vector_sort.cc       | 1769 -----
 .../arrow/compute/kernels/vector_sort_benchmark.cc |  285 -
 cpp/src/arrow/compute/kernels/vector_sort_test.cc  | 1263 ----
 cpp/src/arrow/compute/registry.cc                  |  154 -
 cpp/src/arrow/compute/registry.h                   |   83 -
 cpp/src/arrow/compute/registry_internal.h          |   54 -
 cpp/src/arrow/compute/registry_test.cc             |   87 -
 cpp/src/arrow/compute/type_fwd.h                   |   43 -
 cpp/src/arrow/compute/util_internal.h              |   32 -
 cpp/src/arrow/config.cc                            |   78 -
 cpp/src/arrow/config.h                             |   72 -
 cpp/src/arrow/csv/CMakeLists.txt                   |   39 -
 cpp/src/arrow/csv/api.h                            |   26 -
 cpp/src/arrow/csv/arrow-csv.pc.in                  |   24 -
 cpp/src/arrow/csv/chunker.cc                       |  266 -
 cpp/src/arrow/csv/chunker.h                        |   36 -
 cpp/src/arrow/csv/chunker_test.cc                  |  265 -
 cpp/src/arrow/csv/column_builder.cc                |  367 -
 cpp/src/arrow/csv/column_builder.h                 |   78 -
 cpp/src/arrow/csv/column_builder_test.cc           |  550 --
 cpp/src/arrow/csv/column_decoder.cc                |  367 -
 cpp/src/arrow/csv/column_decoder.h                 |   81 -
 cpp/src/arrow/csv/column_decoder_test.cc           |  427 --
 cpp/src/arrow/csv/converter.cc                     |  691 --
 cpp/src/arrow/csv/converter.h                      |   82 -
 cpp/src/arrow/csv/converter_benchmark.cc           |  152 -
 cpp/src/arrow/csv/converter_test.cc                |  636 --
 cpp/src/arrow/csv/inference_internal.h             |  150 -
 cpp/src/arrow/csv/options.cc                       |   40 -
 cpp/src/arrow/csv/options.h                        |  156 -
 cpp/src/arrow/csv/parser.cc                        |  549 --
 cpp/src/arrow/csv/parser.h                         |  192 -
 cpp/src/arrow/csv/parser_benchmark.cc              |  205 -
 cpp/src/arrow/csv/parser_test.cc                   |  627 --
 cpp/src/arrow/csv/reader.cc                        |  999 ---
 cpp/src/arrow/csv/reader.h                         |   80 -
 cpp/src/arrow/csv/reader_test.cc                   |  212 -
 cpp/src/arrow/csv/test_common.cc                   |  119 -
 cpp/src/arrow/csv/test_common.h                    |   53 -
 cpp/src/arrow/csv/type_fwd.h                       |   27 -
 cpp/src/arrow/csv/writer.cc                        |  437 --
 cpp/src/arrow/csv/writer.h                         |   47 -
 cpp/src/arrow/csv/writer_test.cc                   |  128 -
 cpp/src/arrow/dataset/ArrowDatasetConfig.cmake.in  |   37 -
 cpp/src/arrow/dataset/CMakeLists.txt               |  134 -
 cpp/src/arrow/dataset/README.md                    |   32 -
 cpp/src/arrow/dataset/api.h                        |   29 -
 cpp/src/arrow/dataset/arrow-dataset.pc.in          |   25 -
 cpp/src/arrow/dataset/dataset.cc                   |  228 -
 cpp/src/arrow/dataset/dataset.h                    |  241 -
 cpp/src/arrow/dataset/dataset_internal.h           |  211 -
 cpp/src/arrow/dataset/dataset_test.cc              |  751 --
 cpp/src/arrow/dataset/discovery.cc                 |  275 -
 cpp/src/arrow/dataset/discovery.h                  |  264 -
 cpp/src/arrow/dataset/discovery_test.cc            |  479 --
 cpp/src/arrow/dataset/expression.cc                | 1283 ----
 cpp/src/arrow/dataset/expression.h                 |  250 -
 cpp/src/arrow/dataset/expression_benchmark.cc      |   91 -
 cpp/src/arrow/dataset/expression_internal.h        |  342 -
 cpp/src/arrow/dataset/expression_test.cc           | 1282 ----
 cpp/src/arrow/dataset/file_base.cc                 |  499 --
 cpp/src/arrow/dataset/file_base.h                  |  346 -
 cpp/src/arrow/dataset/file_benchmark.cc            |   87 -
 cpp/src/arrow/dataset/file_csv.cc                  |  204 -
 cpp/src/arrow/dataset/file_csv.h                   |   85 -
 cpp/src/arrow/dataset/file_csv_test.cc             |  301 -
 cpp/src/arrow/dataset/file_ipc.cc                  |  224 -
 cpp/src/arrow/dataset/file_ipc.h                   |  111 -
 cpp/src/arrow/dataset/file_ipc_test.cc             |  388 -
 cpp/src/arrow/dataset/file_parquet.cc              |  783 --
 cpp/src/arrow/dataset/file_parquet.h               |  364 -
 cpp/src/arrow/dataset/file_parquet_test.cc         |  685 --
 cpp/src/arrow/dataset/file_test.cc                 |  541 --
 cpp/src/arrow/dataset/forest_internal.h            |  124 -
 cpp/src/arrow/dataset/partition.cc                 |  617 --
 cpp/src/arrow/dataset/partition.h                  |  321 -
 cpp/src/arrow/dataset/partition_test.cc            |  728 --
 cpp/src/arrow/dataset/pch.h                        |   27 -
 cpp/src/arrow/dataset/projector.cc                 |   63 -
 cpp/src/arrow/dataset/projector.h                  |   32 -
 cpp/src/arrow/dataset/scanner.cc                   |  567 --
 cpp/src/arrow/dataset/scanner.h                    |  437 --
 cpp/src/arrow/dataset/scanner_internal.h           |  200 -
 cpp/src/arrow/dataset/scanner_test.cc              |  457 --
 cpp/src/arrow/dataset/test_util.h                  |  839 ---
 cpp/src/arrow/dataset/type_fwd.h                   |   95 -
 cpp/src/arrow/dataset/visibility.h                 |   50 -
 cpp/src/arrow/datum.cc                             |  284 -
 cpp/src/arrow/datum.h                              |  281 -
 cpp/src/arrow/datum_test.cc                        |  172 -
 cpp/src/arrow/dbi/README.md                        |   24 -
 cpp/src/arrow/dbi/hiveserver2/CMakeLists.txt       |  118 -
 cpp/src/arrow/dbi/hiveserver2/api.h                |   27 -
 cpp/src/arrow/dbi/hiveserver2/columnar_row_set.cc  |  100 -
 cpp/src/arrow/dbi/hiveserver2/columnar_row_set.h   |  155 -
 cpp/src/arrow/dbi/hiveserver2/hiveserver2_test.cc  |  458 --
 cpp/src/arrow/dbi/hiveserver2/operation.cc         |  150 -
 cpp/src/arrow/dbi/hiveserver2/operation.h          |  127 -
 cpp/src/arrow/dbi/hiveserver2/public_api_test.cc   |   26 -
 cpp/src/arrow/dbi/hiveserver2/sample_usage.cc      |  137 -
 cpp/src/arrow/dbi/hiveserver2/service.cc           |  110 -
 cpp/src/arrow/dbi/hiveserver2/service.h            |  140 -
 cpp/src/arrow/dbi/hiveserver2/session.cc           |  103 -
 cpp/src/arrow/dbi/hiveserver2/session.h            |   84 -
 cpp/src/arrow/dbi/hiveserver2/thrift/.gitignore    |    1 -
 .../arrow/dbi/hiveserver2/thrift/CMakeLists.txt    |  117 -
 .../arrow/dbi/hiveserver2/thrift/ExecStats.thrift  |  103 -
 .../dbi/hiveserver2/thrift/ImpalaService.thrift    |  300 -
 cpp/src/arrow/dbi/hiveserver2/thrift/Status.thrift |   23 -
 .../dbi/hiveserver2/thrift/TCLIService.thrift      | 1180 ---
 cpp/src/arrow/dbi/hiveserver2/thrift/Types.thrift  |  218 -
 .../arrow/dbi/hiveserver2/thrift/beeswax.thrift    |  174 -
 cpp/src/arrow/dbi/hiveserver2/thrift/fb303.thrift  |  112 -
 .../dbi/hiveserver2/thrift/generate_error_codes.py |  293 -
 .../dbi/hiveserver2/thrift/hive_metastore.thrift   | 1214 ---
 cpp/src/arrow/dbi/hiveserver2/thrift_internal.cc   |  301 -
 cpp/src/arrow/dbi/hiveserver2/thrift_internal.h    |   91 -
 cpp/src/arrow/dbi/hiveserver2/types.cc             |   45 -
 cpp/src/arrow/dbi/hiveserver2/types.h              |  131 -
 cpp/src/arrow/dbi/hiveserver2/util.cc              |  250 -
 cpp/src/arrow/dbi/hiveserver2/util.h               |   36 -
 cpp/src/arrow/device.cc                            |  209 -
 cpp/src/arrow/device.h                             |  226 -
 cpp/src/arrow/extension_type.cc                    |  169 -
 cpp/src/arrow/extension_type.h                     |  161 -
 cpp/src/arrow/extension_type_test.cc               |  334 -
 cpp/src/arrow/filesystem/CMakeLists.txt            |   75 -
 cpp/src/arrow/filesystem/api.h                     |   28 -
 cpp/src/arrow/filesystem/arrow-filesystem.pc.in    |   24 -
 cpp/src/arrow/filesystem/filesystem.cc             |  750 --
 cpp/src/arrow/filesystem/filesystem.h              |  524 --
 cpp/src/arrow/filesystem/filesystem_test.cc        |  810 --
 cpp/src/arrow/filesystem/hdfs.cc                   |  484 --
 cpp/src/arrow/filesystem/hdfs.h                    |  111 -
 cpp/src/arrow/filesystem/hdfs_test.cc              |  312 -
 cpp/src/arrow/filesystem/localfs.cc                |  448 --
 cpp/src/arrow/filesystem/localfs.h                 |  111 -
 cpp/src/arrow/filesystem/localfs_test.cc           |  396 -
 cpp/src/arrow/filesystem/mockfs.cc                 |  767 --
 cpp/src/arrow/filesystem/mockfs.h                  |  130 -
 cpp/src/arrow/filesystem/path_util.cc              |  271 -
 cpp/src/arrow/filesystem/path_util.h               |  130 -
 cpp/src/arrow/filesystem/s3_internal.h             |  214 -
 cpp/src/arrow/filesystem/s3_test_util.h            |  154 -
 cpp/src/arrow/filesystem/s3fs.cc                   | 2139 ------
 cpp/src/arrow/filesystem/s3fs.h                    |  257 -
 cpp/src/arrow/filesystem/s3fs_benchmark.cc         |  430 --
 cpp/src/arrow/filesystem/s3fs_narrative_test.cc    |  245 -
 cpp/src/arrow/filesystem/s3fs_test.cc              |  971 ---
 cpp/src/arrow/filesystem/test_util.cc              | 1052 ---
 cpp/src/arrow/filesystem/test_util.h               |  208 -
 cpp/src/arrow/filesystem/type_fwd.h                |   49 -
 cpp/src/arrow/filesystem/util_internal.cc          |   73 -
 cpp/src/arrow/filesystem/util_internal.h           |   56 -
 cpp/src/arrow/flight/ArrowFlightConfig.cmake.in    |   36 -
 .../arrow/flight/ArrowFlightTestingConfig.cmake.in |   37 -
 cpp/src/arrow/flight/CMakeLists.txt                |  273 -
 cpp/src/arrow/flight/README.md                     |   36 -
 cpp/src/arrow/flight/api.h                         |   27 -
 cpp/src/arrow/flight/arrow-flight-testing.pc.in    |   25 -
 cpp/src/arrow/flight/arrow-flight.pc.in            |   25 -
 cpp/src/arrow/flight/client.cc                     | 1313 ----
 cpp/src/arrow/flight/client.h                      |  320 -
 cpp/src/arrow/flight/client_auth.h                 |   62 -
 cpp/src/arrow/flight/client_cookie_middleware.cc   |   65 -
 cpp/src/arrow/flight/client_cookie_middleware.h    |   33 -
 cpp/src/arrow/flight/client_header_internal.cc     |  340 -
 cpp/src/arrow/flight/client_header_internal.h      |  151 -
 cpp/src/arrow/flight/client_middleware.h           |   73 -
 cpp/src/arrow/flight/customize_protobuf.h          |  108 -
 cpp/src/arrow/flight/flight_benchmark.cc           |  431 --
 cpp/src/arrow/flight/flight_test.cc                | 2666 -------
 cpp/src/arrow/flight/internal.cc                   |  514 --
 cpp/src/arrow/flight/internal.h                    |  128 -
 cpp/src/arrow/flight/middleware.h                  |   73 -
 cpp/src/arrow/flight/middleware_internal.h         |   46 -
 cpp/src/arrow/flight/pch.h                         |   26 -
 cpp/src/arrow/flight/perf.proto                    |   44 -
 cpp/src/arrow/flight/perf_server.cc                |  255 -
 cpp/src/arrow/flight/platform.h                    |   32 -
 cpp/src/arrow/flight/protocol_internal.cc          |   26 -
 cpp/src/arrow/flight/protocol_internal.h           |   28 -
 cpp/src/arrow/flight/serialization_internal.cc     |  469 --
 cpp/src/arrow/flight/serialization_internal.h      |  150 -
 cpp/src/arrow/flight/server.cc                     | 1164 ---
 cpp/src/arrow/flight/server.h                      |  282 -
 cpp/src/arrow/flight/server_auth.cc                |   37 -
 cpp/src/arrow/flight/server_auth.h                 |   78 -
 cpp/src/arrow/flight/server_middleware.h           |   83 -
 cpp/src/arrow/flight/test_integration.cc           |  270 -
 cpp/src/arrow/flight/test_integration.h            |   49 -
 cpp/src/arrow/flight/test_integration_client.cc    |  244 -
 cpp/src/arrow/flight/test_integration_server.cc    |  207 -
 cpp/src/arrow/flight/test_server.cc                |   62 -
 cpp/src/arrow/flight/test_util.cc                  |  779 --
 cpp/src/arrow/flight/test_util.h                   |  238 -
 .../arrow/flight/try_compile/check_tls_opts_127.cc |   36 -
 .../arrow/flight/try_compile/check_tls_opts_132.cc |   36 -
 .../arrow/flight/try_compile/check_tls_opts_134.cc |   44 -
 .../arrow/flight/try_compile/check_tls_opts_136.cc |   38 -
 cpp/src/arrow/flight/types.cc                      |  327 -
 cpp/src/arrow/flight/types.h                       |  521 --
 cpp/src/arrow/flight/visibility.h                  |   48 -
 cpp/src/arrow/gpu/.gitignore                       |   18 -
 cpp/src/arrow/gpu/ArrowCUDAConfig.cmake.in         |   36 -
 cpp/src/arrow/gpu/CMakeLists.txt                   |   87 -
 cpp/src/arrow/gpu/arrow-cuda.pc.in                 |   26 -
 cpp/src/arrow/gpu/cuda_api.h                       |   23 -
 cpp/src/arrow/gpu/cuda_arrow_ipc.cc                |   69 -
 cpp/src/arrow/gpu/cuda_arrow_ipc.h                 |   72 -
 cpp/src/arrow/gpu/cuda_benchmark.cc                |   94 -
 cpp/src/arrow/gpu/cuda_context.cc                  |  645 --
 cpp/src/arrow/gpu/cuda_context.h                   |  309 -
 cpp/src/arrow/gpu/cuda_internal.cc                 |   66 -
 cpp/src/arrow/gpu/cuda_internal.h                  |   60 -
 cpp/src/arrow/gpu/cuda_memory.cc                   |  487 --
 cpp/src/arrow/gpu/cuda_memory.h                    |  260 -
 cpp/src/arrow/gpu/cuda_test.cc                     |  626 --
 cpp/src/arrow/gpu/cuda_version.h.in                |   25 -
 cpp/src/arrow/io/CMakeLists.txt                    |   39 -
 cpp/src/arrow/io/api.h                             |   25 -
 cpp/src/arrow/io/buffered.cc                       |  480 --
 cpp/src/arrow/io/buffered.h                        |  164 -
 cpp/src/arrow/io/buffered_test.cc                  |  667 --
 cpp/src/arrow/io/caching.cc                        |  208 -
 cpp/src/arrow/io/caching.h                         |  113 -
 cpp/src/arrow/io/compressed.cc                     |  441 --
 cpp/src/arrow/io/compressed.h                      |  115 -
 cpp/src/arrow/io/compressed_test.cc                |  305 -
 cpp/src/arrow/io/concurrency.h                     |  263 -
 cpp/src/arrow/io/file.cc                           |  775 --
 cpp/src/arrow/io/file.h                            |  221 -
 cpp/src/arrow/io/file_benchmark.cc                 |  301 -
 cpp/src/arrow/io/file_test.cc                      | 1064 ---
 cpp/src/arrow/io/hdfs.cc                           |  695 --
 cpp/src/arrow/io/hdfs.h                            |  280 -
 cpp/src/arrow/io/hdfs_internal.cc                  |  545 --
 cpp/src/arrow/io/hdfs_internal.h                   |  222 -
 cpp/src/arrow/io/hdfs_test.cc                      |  464 --
 cpp/src/arrow/io/interfaces.cc                     |  441 --
 cpp/src/arrow/io/interfaces.h                      |  329 -
 cpp/src/arrow/io/memory.cc                         |  388 -
 cpp/src/arrow/io/memory.h                          |  197 -
 cpp/src/arrow/io/memory_benchmark.cc               |  359 -
 cpp/src/arrow/io/memory_test.cc                    |  756 --
 cpp/src/arrow/io/mman.h                            |  169 -
 cpp/src/arrow/io/slow.cc                           |  148 -
 cpp/src/arrow/io/slow.h                            |  118 -
 cpp/src/arrow/io/test_common.cc                    |  121 -
 cpp/src/arrow/io/test_common.h                     |   58 -
 cpp/src/arrow/io/transform.cc                      |  149 -
 cpp/src/arrow/io/transform.h                       |   56 -
 cpp/src/arrow/io/type_fwd.h                        |   60 -
 cpp/src/arrow/io/util_internal.h                   |   66 -
 cpp/src/arrow/ipc/CMakeLists.txt                   |   87 -
 cpp/src/arrow/ipc/api.h                            |   25 -
 cpp/src/arrow/ipc/dictionary.cc                    |  412 -
 cpp/src/arrow/ipc/dictionary.h                     |  177 -
 cpp/src/arrow/ipc/feather.cc                       |  817 --
 cpp/src/arrow/ipc/feather.fbs                      |  156 -
 cpp/src/arrow/ipc/feather.h                        |  140 -
 cpp/src/arrow/ipc/feather_test.cc                  |  373 -
 cpp/src/arrow/ipc/file_fuzz.cc                     |   28 -
 cpp/src/arrow/ipc/file_to_stream.cc                |   65 -
 cpp/src/arrow/ipc/generate_fuzz_corpus.cc          |  161 -
 cpp/src/arrow/ipc/generate_tensor_fuzz_corpus.cc   |  134 -
 cpp/src/arrow/ipc/json_simple.cc                   |  917 ---
 cpp/src/arrow/ipc/json_simple.h                    |   57 -
 cpp/src/arrow/ipc/json_simple_test.cc              | 1333 ----
 cpp/src/arrow/ipc/message.cc                       |  876 ---
 cpp/src/arrow/ipc/message.h                        |  531 --
 cpp/src/arrow/ipc/metadata_internal.cc             | 1486 ----
 cpp/src/arrow/ipc/metadata_internal.h              |  227 -
 cpp/src/arrow/ipc/options.cc                       |   41 -
 cpp/src/arrow/ipc/options.h                        |  161 -
 cpp/src/arrow/ipc/read_write_benchmark.cc          |  197 -
 cpp/src/arrow/ipc/read_write_test.cc               | 2356 ------
 cpp/src/arrow/ipc/reader.cc                        | 1822 -----
 cpp/src/arrow/ipc/reader.h                         |  495 --
 cpp/src/arrow/ipc/stream_fuzz.cc                   |   28 -
 cpp/src/arrow/ipc/stream_to_file.cc                |   61 -
 cpp/src/arrow/ipc/tensor_stream_fuzz.cc            |   29 -
 cpp/src/arrow/ipc/tensor_test.cc                   |  506 --
 cpp/src/arrow/ipc/test_common.cc                   | 1104 ---
 cpp/src/arrow/ipc/test_common.h                    |  172 -
 cpp/src/arrow/ipc/type_fwd.h                       |   65 -
 cpp/src/arrow/ipc/util.h                           |   41 -
 cpp/src/arrow/ipc/writer.cc                        | 1429 ----
 cpp/src/arrow/ipc/writer.h                         |  459 --
 cpp/src/arrow/json/CMakeLists.txt                  |   32 -
 cpp/src/arrow/json/api.h                           |   21 -
 cpp/src/arrow/json/arrow-json.pc.in                |   24 -
 cpp/src/arrow/json/chunked_builder.cc              |  469 --
 cpp/src/arrow/json/chunked_builder.h               |   68 -
 cpp/src/arrow/json/chunked_builder_test.cc         |  454 --
 cpp/src/arrow/json/chunker.cc                      |  181 -
 cpp/src/arrow/json/chunker.h                       |   35 -
 cpp/src/arrow/json/chunker_test.cc                 |  276 -
 cpp/src/arrow/json/converter.cc                    |  323 -
 cpp/src/arrow/json/converter.h                     |   94 -
 cpp/src/arrow/json/converter_test.cc               |  100 -
 cpp/src/arrow/json/object_parser.cc                |   83 -
 cpp/src/arrow/json/object_parser.h                 |   49 -
 cpp/src/arrow/json/object_writer.cc                |   82 -
 cpp/src/arrow/json/object_writer.h                 |   48 -
 cpp/src/arrow/json/options.cc                      |   28 -
 cpp/src/arrow/json/options.h                       |   74 -
 cpp/src/arrow/json/parser.cc                       | 1099 ---
 cpp/src/arrow/json/parser.h                        |  101 -
 cpp/src/arrow/json/parser_benchmark.cc             |  164 -
 cpp/src/arrow/json/parser_test.cc                  |  254 -
 cpp/src/arrow/json/rapidjson_defs.h                |   43 -
 cpp/src/arrow/json/reader.cc                       |  227 -
 cpp/src/arrow/json/reader.h                        |   72 -
 cpp/src/arrow/json/reader_test.cc                  |  278 -
 cpp/src/arrow/json/test_common.h                   |  251 -
 cpp/src/arrow/json/type_fwd.h                      |   26 -
 cpp/src/arrow/memory_pool.cc                       |  657 --
 cpp/src/arrow/memory_pool.h                        |  178 -
 cpp/src/arrow/memory_pool_benchmark.cc             |  129 -
 cpp/src/arrow/memory_pool_test.cc                  |  174 -
 cpp/src/arrow/memory_pool_test.h                   |   92 -
 cpp/src/arrow/pch.h                                |   30 -
 cpp/src/arrow/pretty_print.cc                      |  673 --
 cpp/src/arrow/pretty_print.h                       |  123 -
 cpp/src/arrow/pretty_print_test.cc                 |  744 --
 cpp/src/arrow/public_api_test.cc                   |   89 -
 cpp/src/arrow/python/ArrowPythonConfig.cmake.in    |   36 -
 .../arrow/python/ArrowPythonFlightConfig.cmake.in  |   37 -
 cpp/src/arrow/python/CMakeLists.txt                |  186 -
 cpp/src/arrow/python/api.h                         |   30 -
 cpp/src/arrow/python/arrow-python-flight.pc.in     |   25 -
 cpp/src/arrow/python/arrow-python.pc.in            |   26 -
 cpp/src/arrow/python/arrow_to_pandas.cc            | 2294 ------
 cpp/src/arrow/python/arrow_to_pandas.h             |  124 -
 cpp/src/arrow/python/benchmark.cc                  |   38 -
 cpp/src/arrow/python/benchmark.h                   |   36 -
 cpp/src/arrow/python/common.cc                     |  206 -
 cpp/src/arrow/python/common.h                      |  300 -
 cpp/src/arrow/python/datetime.cc                   |  455 --
 cpp/src/arrow/python/datetime.h                    |  183 -
 cpp/src/arrow/python/decimal.cc                    |  247 -
 cpp/src/arrow/python/decimal.h                     |  128 -
 cpp/src/arrow/python/deserialize.cc                |  495 --
 cpp/src/arrow/python/deserialize.h                 |  106 -
 cpp/src/arrow/python/extension_type.cc             |  217 -
 cpp/src/arrow/python/extension_type.h              |   85 -
 cpp/src/arrow/python/filesystem.cc                 |  206 -
 cpp/src/arrow/python/filesystem.h                  |  122 -
 cpp/src/arrow/python/flight.cc                     |  408 -
 cpp/src/arrow/python/flight.h                      |  357 -
 cpp/src/arrow/python/helpers.cc                    |  436 --
 cpp/src/arrow/python/helpers.h                     |  156 -
 cpp/src/arrow/python/inference.cc                  |  659 --
 cpp/src/arrow/python/inference.h                   |   64 -
 cpp/src/arrow/python/init.cc                       |   24 -
 cpp/src/arrow/python/init.h                        |   26 -
 cpp/src/arrow/python/io.cc                         |  374 -
 cpp/src/arrow/python/io.h                          |  116 -
 cpp/src/arrow/python/ipc.cc                        |   67 -
 cpp/src/arrow/python/ipc.h                         |   52 -
 cpp/src/arrow/python/iterators.h                   |  154 -
 cpp/src/arrow/python/numpy_convert.cc              |  561 --
 cpp/src/arrow/python/numpy_convert.h               |  120 -
 cpp/src/arrow/python/numpy_internal.h              |  182 -
 cpp/src/arrow/python/numpy_interop.h               |   96 -
 cpp/src/arrow/python/numpy_to_arrow.cc             |  854 ---
 cpp/src/arrow/python/numpy_to_arrow.h              |   72 -
 cpp/src/arrow/python/pch.h                         |   24 -
 cpp/src/arrow/python/platform.h                    |   36 -
 cpp/src/arrow/python/pyarrow.cc                    |   93 -
 cpp/src/arrow/python/pyarrow.h                     |   87 -
 cpp/src/arrow/python/pyarrow_api.h                 |  239 -
 cpp/src/arrow/python/pyarrow_lib.h                 |   82 -
 cpp/src/arrow/python/python_test.cc                |  536 --
 cpp/src/arrow/python/python_to_arrow.cc            | 1062 ---
 cpp/src/arrow/python/python_to_arrow.h             |   80 -
 cpp/src/arrow/python/serialize.cc                  |  798 --
 cpp/src/arrow/python/serialize.h                   |  145 -
 cpp/src/arrow/python/type_traits.h                 |  350 -
 cpp/src/arrow/python/util/CMakeLists.txt           |   32 -
 cpp/src/arrow/python/util/test_main.cc             |   41 -
 cpp/src/arrow/python/visibility.h                  |   39 -
 cpp/src/arrow/record_batch.cc                      |  348 -
 cpp/src/arrow/record_batch.h                       |  232 -
 cpp/src/arrow/record_batch_test.cc                 |  292 -
 cpp/src/arrow/result.cc                            |   36 -
 cpp/src/arrow/result.h                             |  514 --
 cpp/src/arrow/result_internal.h                    |   22 -
 cpp/src/arrow/result_test.cc                       |  728 --
 cpp/src/arrow/scalar.cc                            |  645 --
 cpp/src/arrow/scalar.h                             |  537 --
 cpp/src/arrow/scalar_test.cc                       | 1046 ---
 cpp/src/arrow/sparse_tensor.cc                     |  478 --
 cpp/src/arrow/sparse_tensor.h                      |  624 --
 cpp/src/arrow/sparse_tensor_test.cc                | 1678 -----
 cpp/src/arrow/status.cc                            |  143 -
 cpp/src/arrow/status.h                             |  448 --
 cpp/src/arrow/status_test.cc                       |  130 -
 cpp/src/arrow/stl.h                                |  466 --
 cpp/src/arrow/stl_allocator.h                      |  153 -
 cpp/src/arrow/stl_iterator.h                       |  142 -
 cpp/src/arrow/stl_iterator_test.cc                 |  252 -
 cpp/src/arrow/stl_test.cc                          |  558 --
 cpp/src/arrow/symbols.map                          |   38 -
 cpp/src/arrow/table.cc                             |  645 --
 cpp/src/arrow/table.h                              |  295 -
 cpp/src/arrow/table_builder.cc                     |  113 -
 cpp/src/arrow/table_builder.h                      |  110 -
 cpp/src/arrow/table_builder_test.cc                |  182 -
 cpp/src/arrow/table_test.cc                        |  753 --
 cpp/src/arrow/tensor.cc                            |  342 -
 cpp/src/arrow/tensor.h                             |  250 -
 cpp/src/arrow/tensor/CMakeLists.txt                |   25 -
 cpp/src/arrow/tensor/converter.h                   |   67 -
 cpp/src/arrow/tensor/converter_internal.h          |   88 -
 cpp/src/arrow/tensor/coo_converter.cc              |  333 -
 cpp/src/arrow/tensor/csf_converter.cc              |  289 -
 cpp/src/arrow/tensor/csx_converter.cc              |  241 -
 .../arrow/tensor/tensor_conversion_benchmark.cc    |  230 -
 cpp/src/arrow/tensor_test.cc                       |  749 --
 cpp/src/arrow/testing/CMakeLists.txt               |   37 -
 cpp/src/arrow/testing/extension_type.h             |  119 -
 cpp/src/arrow/testing/future_util.h                |  104 -
 cpp/src/arrow/testing/generator.cc                 |  182 -
 cpp/src/arrow/testing/generator.h                  |  261 -
 cpp/src/arrow/testing/gtest_common.h               |  128 -
 cpp/src/arrow/testing/gtest_compat.h               |   29 -
 cpp/src/arrow/testing/gtest_util.cc                |  854 ---
 cpp/src/arrow/testing/gtest_util.h                 |  622 --
 cpp/src/arrow/testing/json_integration.cc          |  219 -
 cpp/src/arrow/testing/json_integration.h           |  129 -
 cpp/src/arrow/testing/json_integration_test.cc     | 1189 ---
 cpp/src/arrow/testing/json_internal.cc             | 1755 -----
 cpp/src/arrow/testing/json_internal.h              |  126 -
 cpp/src/arrow/testing/macros.h                     |   29 -
 cpp/src/arrow/testing/pch.h                        |   26 -
 cpp/src/arrow/testing/random.cc                    |  918 ---
 cpp/src/arrow/testing/random.h                     |  458 --
 cpp/src/arrow/testing/random_test.cc               |  356 -
 cpp/src/arrow/testing/util.cc                      |  185 -
 cpp/src/arrow/testing/util.h                       |  190 -
 cpp/src/arrow/testing/visibility.h                 |   48 -
 cpp/src/arrow/type.cc                              | 2267 ------
 cpp/src/arrow/type.h                               | 1926 -----
 cpp/src/arrow/type_benchmark.cc                    |  439 --
 cpp/src/arrow/type_fwd.h                           |  677 --
 cpp/src/arrow/type_test.cc                         | 1775 -----
 cpp/src/arrow/type_traits.h                        | 1012 ---
 cpp/src/arrow/util/CMakeLists.txt                  |   94 -
 cpp/src/arrow/util/algorithm.h                     |   33 -
 cpp/src/arrow/util/align_util.h                    |   68 -
 cpp/src/arrow/util/align_util_test.cc              |  150 -
 cpp/src/arrow/util/async_generator.h               | 1561 ----
 cpp/src/arrow/util/async_generator_test.cc         | 1456 ----
 cpp/src/arrow/util/atomic_shared_ptr.h             |  111 -
 cpp/src/arrow/util/base64.h                        |   34 -
 cpp/src/arrow/util/basic_decimal.cc                | 1344 ----
 cpp/src/arrow/util/basic_decimal.h                 |  341 -
 cpp/src/arrow/util/benchmark_main.cc               |   24 -
 cpp/src/arrow/util/benchmark_util.h                |  138 -
 cpp/src/arrow/util/bit_block_counter.cc            |   80 -
 cpp/src/arrow/util/bit_block_counter.h             |  529 --
 cpp/src/arrow/util/bit_block_counter_benchmark.cc  |  266 -
 cpp/src/arrow/util/bit_block_counter_test.cc       |  417 -
 cpp/src/arrow/util/bit_run_reader.cc               |   54 -
 cpp/src/arrow/util/bit_run_reader.h                |  515 --
 cpp/src/arrow/util/bit_stream_utils.h              |  433 --
 cpp/src/arrow/util/bit_util.cc                     |   71 -
 cpp/src/arrow/util/bit_util.h                      |  320 -
 cpp/src/arrow/util/bit_util_benchmark.cc           |  560 --
 cpp/src/arrow/util/bit_util_test.cc                | 2160 ------
 cpp/src/arrow/util/bitmap.cc                       |   75 -
 cpp/src/arrow/util/bitmap.h                        |  309 -
 cpp/src/arrow/util/bitmap_builders.cc              |   72 -
 cpp/src/arrow/util/bitmap_builders.h               |   43 -
 cpp/src/arrow/util/bitmap_generate.h               |  111 -
 cpp/src/arrow/util/bitmap_ops.cc                   |  588 --
 cpp/src/arrow/util/bitmap_ops.h                    |  187 -
 cpp/src/arrow/util/bitmap_reader.h                 |  159 -
 cpp/src/arrow/util/bitmap_visit.h                  |   88 -
 cpp/src/arrow/util/bitmap_writer.h                 |  184 -
 cpp/src/arrow/util/bitset_stack.h                  |   89 -
 cpp/src/arrow/util/bpacking.cc                     |  178 -
 cpp/src/arrow/util/bpacking.h                      |   32 -
 cpp/src/arrow/util/bpacking_avx2.cc                |   31 -
 cpp/src/arrow/util/bpacking_avx2.h                 |   28 -
 cpp/src/arrow/util/bpacking_avx2_generated.h       | 1819 -----
 cpp/src/arrow/util/bpacking_avx512.cc              |   31 -
 cpp/src/arrow/util/bpacking_avx512.h               |   28 -
 cpp/src/arrow/util/bpacking_avx512_generated.h     | 1509 ----
 cpp/src/arrow/util/bpacking_default.h              | 4251 -----------
 cpp/src/arrow/util/bpacking_neon.cc                |   31 -
 cpp/src/arrow/util/bpacking_neon.h                 |   28 -
 cpp/src/arrow/util/bpacking_simd128_generated.h    | 2138 ------
 cpp/src/arrow/util/bpacking_simd256_generated.h    | 1270 ----
 cpp/src/arrow/util/bpacking_simd512_generated.h    |  836 --
 cpp/src/arrow/util/bpacking_simd_codegen.py        |  209 -
 cpp/src/arrow/util/bpacking_simd_internal.h        |  138 -
 cpp/src/arrow/util/byte_stream_split.h             |  626 --
 cpp/src/arrow/util/cache_benchmark.cc              |  146 -
 cpp/src/arrow/util/cache_internal.h                |  210 -
 cpp/src/arrow/util/cache_test.cc                   |  290 -
 cpp/src/arrow/util/cancel.cc                       |  226 -
 cpp/src/arrow/util/cancel.h                        |  102 -
 cpp/src/arrow/util/cancel_test.cc                  |  308 -
 cpp/src/arrow/util/checked_cast.h                  |   61 -
 cpp/src/arrow/util/checked_cast_test.cc            |   74 -
 cpp/src/arrow/util/compare.h                       |   62 -
 cpp/src/arrow/util/compiler_util.h                 |   22 -
 cpp/src/arrow/util/compression.cc                  |  231 -
 cpp/src/arrow/util/compression.h                   |  181 -
 cpp/src/arrow/util/compression_benchmark.cc        |  201 -
 cpp/src/arrow/util/compression_brotli.cc           |  240 -
 cpp/src/arrow/util/compression_bz2.cc              |  281 -
 cpp/src/arrow/util/compression_internal.h          |   80 -
 cpp/src/arrow/util/compression_lz4.cc              |  489 --
 cpp/src/arrow/util/compression_snappy.cc           |   99 -
 cpp/src/arrow/util/compression_test.cc             |  604 --
 cpp/src/arrow/util/compression_zlib.cc             |  501 --
 cpp/src/arrow/util/compression_zstd.cc             |  246 -
 cpp/src/arrow/util/concurrent_map.h                |   68 -
 cpp/src/arrow/util/config.h.cmake                  |   48 -
 cpp/src/arrow/util/converter.h                     |  368 -
 cpp/src/arrow/util/cpu_info.cc                     |  563 --
 cpp/src/arrow/util/cpu_info.h                      |  143 -
 cpp/src/arrow/util/decimal.cc                      |  932 ---
 cpp/src/arrow/util/decimal.h                       |  291 -
 cpp/src/arrow/util/decimal_benchmark.cc            |  219 -
 cpp/src/arrow/util/decimal_test.cc                 | 1671 ----
 cpp/src/arrow/util/delimiting.cc                   |  141 -
 cpp/src/arrow/util/delimiting.h                    |  147 -
 cpp/src/arrow/util/dispatch.h                      |  115 -
 cpp/src/arrow/util/double_conversion.h             |   32 -
 cpp/src/arrow/util/endian.h                        |  181 -
 cpp/src/arrow/util/formatting.cc                   |   91 -
 cpp/src/arrow/util/formatting.h                    |  426 --
 cpp/src/arrow/util/formatting_util_test.cc         |  430 --
 cpp/src/arrow/util/functional.h                    |  130 -
 cpp/src/arrow/util/future.cc                       |  375 -
 cpp/src/arrow/util/future.h                        |  762 --
 cpp/src/arrow/util/future_iterator.h               |   75 -
 cpp/src/arrow/util/future_test.cc                  | 1602 ----
 cpp/src/arrow/util/hash_util.h                     |   66 -
 cpp/src/arrow/util/hashing.h                       |  878 ---
 cpp/src/arrow/util/hashing_benchmark.cc            |  123 -
 cpp/src/arrow/util/hashing_test.cc                 |  490 --
 cpp/src/arrow/util/int128_internal.h               |   45 -
 cpp/src/arrow/util/int_util.cc                     |  952 ---
 cpp/src/arrow/util/int_util.h                      |  117 -
 cpp/src/arrow/util/int_util_benchmark.cc           |  143 -
 cpp/src/arrow/util/int_util_internal.h             |  125 -
 cpp/src/arrow/util/int_util_test.cc                |  597 --
 cpp/src/arrow/util/io_util.cc                      | 1724 -----
 cpp/src/arrow/util/io_util.h                       |  403 -
 cpp/src/arrow/util/io_util_test.cc                 |  700 --
 cpp/src/arrow/util/io_util_test.manifest           |   39 -
 cpp/src/arrow/util/io_util_test.rc                 |   44 -
 cpp/src/arrow/util/iterator.h                      |  563 --
 cpp/src/arrow/util/iterator_test.cc                |  464 --
 cpp/src/arrow/util/key_value_metadata.cc           |  269 -
 cpp/src/arrow/util/key_value_metadata.h            |   96 -
 cpp/src/arrow/util/key_value_metadata_test.cc      |  211 -
 cpp/src/arrow/util/logging.cc                      |  256 -
 cpp/src/arrow/util/logging.h                       |  259 -
 cpp/src/arrow/util/logging_test.cc                 |  103 -
 cpp/src/arrow/util/machine_benchmark.cc            |   74 -
 cpp/src/arrow/util/macros.h                        |  185 -
 cpp/src/arrow/util/make_unique.h                   |   42 -
 cpp/src/arrow/util/map.h                           |   63 -
 cpp/src/arrow/util/memory.cc                       |   74 -
 cpp/src/arrow/util/memory.h                        |   43 -
 cpp/src/arrow/util/mutex.cc                        |   54 -
 cpp/src/arrow/util/mutex.h                         |   64 -
 cpp/src/arrow/util/optional.h                      |   35 -
 cpp/src/arrow/util/parallel.h                      |   65 -
 cpp/src/arrow/util/print.h                         |   51 -
 cpp/src/arrow/util/queue.h                         |   29 -
 cpp/src/arrow/util/queue_benchmark.cc              |   85 -
 cpp/src/arrow/util/queue_test.cc                   |   55 -
 cpp/src/arrow/util/range.h                         |  155 -
 cpp/src/arrow/util/range_benchmark.cc              |  128 -
 cpp/src/arrow/util/range_test.cc                   |   69 -
 cpp/src/arrow/util/rle_encoding.h                  |  826 --
 cpp/src/arrow/util/rle_encoding_test.cc            |  573 --
 cpp/src/arrow/util/simd.h                          |   50 -
 cpp/src/arrow/util/sort.h                          |   78 -
 cpp/src/arrow/util/spaced.h                        |   98 -
 cpp/src/arrow/util/stl_util_test.cc                |  172 -
 cpp/src/arrow/util/stopwatch.h                     |   48 -
 cpp/src/arrow/util/string.cc                       |  191 -
 cpp/src/arrow/util/string.h                        |   79 -
 cpp/src/arrow/util/string_builder.cc               |   40 -
 cpp/src/arrow/util/string_builder.h                |   84 -
 cpp/src/arrow/util/string_test.cc                  |  144 -
 cpp/src/arrow/util/string_view.h                   |   38 -
 cpp/src/arrow/util/task_group.cc                   |  224 -
 cpp/src/arrow/util/task_group.h                    |  106 -
 cpp/src/arrow/util/task_group_test.cc              |  444 --
 cpp/src/arrow/util/tdigest.cc                      |  416 -
 cpp/src/arrow/util/tdigest.h                       |  103 -
 cpp/src/arrow/util/tdigest_benchmark.cc            |   48 -
 cpp/src/arrow/util/tdigest_test.cc                 |  290 -
 cpp/src/arrow/util/test_common.cc                  |   68 -
 cpp/src/arrow/util/test_common.h                   |   88 -
 cpp/src/arrow/util/thread_pool.cc                  |  440 --
 cpp/src/arrow/util/thread_pool.h                   |  343 -
 cpp/src/arrow/util/thread_pool_benchmark.cc        |  231 -
 cpp/src/arrow/util/thread_pool_test.cc             |  666 --
 cpp/src/arrow/util/time.cc                         |   68 -
 cpp/src/arrow/util/time.h                          |   82 -
 cpp/src/arrow/util/time_test.cc                    |   63 -
 cpp/src/arrow/util/trie.cc                         |  211 -
 cpp/src/arrow/util/trie.h                          |  245 -
 cpp/src/arrow/util/trie_benchmark.cc               |  222 -
 cpp/src/arrow/util/trie_test.cc                    |  305 -
 cpp/src/arrow/util/type_fwd.h                      |   62 -
 cpp/src/arrow/util/type_traits.h                   |   86 -
 cpp/src/arrow/util/ubsan.h                         |   88 -
 cpp/src/arrow/util/uri.cc                          |  292 -
 cpp/src/arrow/util/uri.h                           |  104 -
 cpp/src/arrow/util/uri_test.cc                     |  312 -
 cpp/src/arrow/util/utf8.cc                         |  158 -
 cpp/src/arrow/util/utf8.h                          |  522 --
 cpp/src/arrow/util/utf8_util_benchmark.cc          |  142 -
 cpp/src/arrow/util/utf8_util_test.cc               |  493 --
 cpp/src/arrow/util/value_parsing.cc                |   87 -
 cpp/src/arrow/util/value_parsing.h                 |  780 --
 cpp/src/arrow/util/value_parsing_benchmark.cc      |  255 -
 cpp/src/arrow/util/value_parsing_test.cc           |  460 --
 cpp/src/arrow/util/variant.h                       |  440 --
 cpp/src/arrow/util/variant_benchmark.cc            |  248 -
 cpp/src/arrow/util/variant_test.cc                 |  330 -
 cpp/src/arrow/util/vector.h                        |  137 -
 cpp/src/arrow/util/visibility.h                    |   45 -
 cpp/src/arrow/util/windows_compatibility.h         |   42 -
 cpp/src/arrow/util/windows_fixup.h                 |   45 -
 cpp/src/arrow/vendored/CMakeLists.txt              |   21 -
 cpp/src/arrow/vendored/ProducerConsumerQueue.h     |  217 -
 cpp/src/arrow/vendored/base64.cpp                  |  128 -
 cpp/src/arrow/vendored/datetime.h                  |   26 -
 cpp/src/arrow/vendored/datetime/CMakeLists.txt     |   18 -
 cpp/src/arrow/vendored/datetime/README.md          |   21 -
 cpp/src/arrow/vendored/datetime/date.h             | 7949 --------------------
 cpp/src/arrow/vendored/datetime/ios.h              |   53 -
 cpp/src/arrow/vendored/datetime/ios.mm             |  340 -
 cpp/src/arrow/vendored/datetime/tz.cpp             | 3877 ----------
 cpp/src/arrow/vendored/datetime/tz.h               | 2804 -------
 cpp/src/arrow/vendored/datetime/tz_private.h       |  319 -
 cpp/src/arrow/vendored/datetime/visibility.h       |   26 -
 .../arrow/vendored/double-conversion/.gitignore    |    1 -
 .../vendored/double-conversion/CMakeLists.txt      |   18 -
 cpp/src/arrow/vendored/double-conversion/README.md |   20 -
 .../vendored/double-conversion/bignum-dtoa.cc      |  641 --
 .../arrow/vendored/double-conversion/bignum-dtoa.h |   84 -
 cpp/src/arrow/vendored/double-conversion/bignum.cc |  767 --
 cpp/src/arrow/vendored/double-conversion/bignum.h  |  144 -
 .../vendored/double-conversion/cached-powers.cc    |  175 -
 .../vendored/double-conversion/cached-powers.h     |   64 -
 cpp/src/arrow/vendored/double-conversion/diy-fp.cc |   57 -
 cpp/src/arrow/vendored/double-conversion/diy-fp.h  |  118 -
 .../double-conversion/double-conversion.cc         | 1171 ---
 .../vendored/double-conversion/double-conversion.h |  587 --
 .../arrow/vendored/double-conversion/fast-dtoa.cc  |  665 --
 .../arrow/vendored/double-conversion/fast-dtoa.h   |   88 -
 .../arrow/vendored/double-conversion/fixed-dtoa.cc |  405 -
 .../arrow/vendored/double-conversion/fixed-dtoa.h  |   56 -
 cpp/src/arrow/vendored/double-conversion/ieee.h    |  402 -
 cpp/src/arrow/vendored/double-conversion/strtod.cc |  580 --
 cpp/src/arrow/vendored/double-conversion/strtod.h  |   45 -
 cpp/src/arrow/vendored/double-conversion/utils.h   |  367 -
 cpp/src/arrow/vendored/fast_float/README.md        |    7 -
 cpp/src/arrow/vendored/fast_float/ascii_number.h   |  301 -
 .../arrow/vendored/fast_float/decimal_to_binary.h  |  176 -
 cpp/src/arrow/vendored/fast_float/fast_float.h     |   48 -
 cpp/src/arrow/vendored/fast_float/fast_table.h     |  691 --
 cpp/src/arrow/vendored/fast_float/float_common.h   |  345 -
 cpp/src/arrow/vendored/fast_float/parse_number.h   |  133 -
 .../fast_float/simple_decimal_conversion.h         |  362 -
 cpp/src/arrow/vendored/musl/README.md              |   25 -
 cpp/src/arrow/vendored/musl/strptime.c             |  237 -
 cpp/src/arrow/vendored/optional.hpp                | 1553 ----
 cpp/src/arrow/vendored/portable-snippets/README.md |   10 -
 .../arrow/vendored/portable-snippets/safe-math.h   | 1072 ---
 cpp/src/arrow/vendored/string_view.hpp             | 1531 ----
 cpp/src/arrow/vendored/strptime.h                  |   35 -
 cpp/src/arrow/vendored/uriparser/README.md         |   25 -
 cpp/src/arrow/vendored/uriparser/Uri.h             | 1090 ---
 cpp/src/arrow/vendored/uriparser/UriBase.h         |  377 -
 cpp/src/arrow/vendored/uriparser/UriCommon.c       |  572 --
 cpp/src/arrow/vendored/uriparser/UriCommon.h       |  109 -
 cpp/src/arrow/vendored/uriparser/UriCompare.c      |  168 -
 cpp/src/arrow/vendored/uriparser/UriDefsAnsi.h     |   82 -
 cpp/src/arrow/vendored/uriparser/UriDefsConfig.h   |  102 -
 cpp/src/arrow/vendored/uriparser/UriDefsUnicode.h  |   82 -
 cpp/src/arrow/vendored/uriparser/UriEscape.c       |  453 --
 cpp/src/arrow/vendored/uriparser/UriFile.c         |  242 -
 cpp/src/arrow/vendored/uriparser/UriIp4.c          |  329 -
 cpp/src/arrow/vendored/uriparser/UriIp4.h          |  110 -
 cpp/src/arrow/vendored/uriparser/UriIp4Base.c      |   96 -
 cpp/src/arrow/vendored/uriparser/UriIp4Base.h      |   59 -
 cpp/src/arrow/vendored/uriparser/UriMemory.c       |  468 --
 cpp/src/arrow/vendored/uriparser/UriMemory.h       |   78 -
 cpp/src/arrow/vendored/uriparser/UriNormalize.c    |  771 --
 .../arrow/vendored/uriparser/UriNormalizeBase.c    |  119 -
 .../arrow/vendored/uriparser/UriNormalizeBase.h    |   53 -
 cpp/src/arrow/vendored/uriparser/UriParse.c        | 2410 ------
 cpp/src/arrow/vendored/uriparser/UriParseBase.c    |   90 -
 cpp/src/arrow/vendored/uriparser/UriParseBase.h    |   55 -
 cpp/src/arrow/vendored/uriparser/UriQuery.c        |  501 --
 cpp/src/arrow/vendored/uriparser/UriRecompose.c    |  577 --
 cpp/src/arrow/vendored/uriparser/UriResolve.c      |  329 -
 cpp/src/arrow/vendored/uriparser/UriShorten.c      |  324 -
 cpp/src/arrow/vendored/uriparser/config.h          |   47 -
 cpp/src/arrow/vendored/utfcpp/README.md            |   28 -
 cpp/src/arrow/vendored/utfcpp/checked.h            |  333 -
 cpp/src/arrow/vendored/utfcpp/core.h               |  338 -
 cpp/src/arrow/vendored/utfcpp/cpp11.h              |  103 -
 cpp/src/arrow/vendored/xxhash.h                    |   18 -
 cpp/src/arrow/vendored/xxhash/README.md            |   22 -
 cpp/src/arrow/vendored/xxhash/xxhash.c             |   43 -
 cpp/src/arrow/vendored/xxhash/xxhash.h             | 4769 ------------
 cpp/src/arrow/visitor.cc                           |  169 -
 cpp/src/arrow/visitor.h                            |  152 -
 cpp/src/arrow/visitor_inline.h                     |  449 --
 cpp/src/gandiva/CMakeLists.txt                     |  249 -
 cpp/src/gandiva/GandivaConfig.cmake.in             |   36 -
 cpp/src/gandiva/annotator.cc                       |  118 -
 cpp/src/gandiva/annotator.h                        |   81 -
 cpp/src/gandiva/annotator_test.cc                  |  102 -
 cpp/src/gandiva/arrow.h                            |   57 -
 cpp/src/gandiva/basic_decimal_scalar.h             |   65 -
 cpp/src/gandiva/bitmap_accumulator.cc              |   75 -
 cpp/src/gandiva/bitmap_accumulator.h               |   79 -
 cpp/src/gandiva/bitmap_accumulator_test.cc         |  112 -
 cpp/src/gandiva/cache.cc                           |   45 -
 cpp/src/gandiva/cache.h                            |   59 -
 cpp/src/gandiva/cast_time.cc                       |   85 -
 cpp/src/gandiva/compiled_expr.h                    |   71 -
 cpp/src/gandiva/condition.h                        |   37 -
 cpp/src/gandiva/configuration.cc                   |   43 -
 cpp/src/gandiva/configuration.h                    |   84 -
 cpp/src/gandiva/context_helper.cc                  |   76 -
 cpp/src/gandiva/date_utils.cc                      |  232 -
 cpp/src/gandiva/date_utils.h                       |   52 -
 cpp/src/gandiva/decimal_ir.cc                      |  559 --
 cpp/src/gandiva/decimal_ir.h                       |  188 -
 cpp/src/gandiva/decimal_scalar.h                   |   76 -
 cpp/src/gandiva/decimal_type_util.cc               |   75 -
 cpp/src/gandiva/decimal_type_util.h                |   83 -
 cpp/src/gandiva/decimal_type_util_test.cc          |   58 -
 cpp/src/gandiva/decimal_xlarge.cc                  |  284 -
 cpp/src/gandiva/decimal_xlarge.h                   |   41 -
 cpp/src/gandiva/dex.h                              |  378 -
 cpp/src/gandiva/dex_visitor.h                      |   92 -
 cpp/src/gandiva/engine.cc                          |  338 -
 cpp/src/gandiva/engine.h                           |  104 -
 cpp/src/gandiva/engine_llvm_test.cc                |  131 -
 cpp/src/gandiva/eval_batch.h                       |  107 -
 cpp/src/gandiva/execution_context.h                |   54 -
 cpp/src/gandiva/exported_funcs.h                   |   59 -
 cpp/src/gandiva/exported_funcs_registry.cc         |   30 -
 cpp/src/gandiva/exported_funcs_registry.h          |   54 -
 cpp/src/gandiva/expr_decomposer.cc                 |  308 -
 cpp/src/gandiva/expr_decomposer.h                  |  125 -
 cpp/src/gandiva/expr_decomposer_test.cc            |  409 -
 cpp/src/gandiva/expr_validator.cc                  |  184 -
 cpp/src/gandiva/expr_validator.h                   |   78 -
 cpp/src/gandiva/expression.cc                      |   25 -
 cpp/src/gandiva/expression.h                       |   46 -
 cpp/src/gandiva/expression_registry.cc             |  187 -
 cpp/src/gandiva/expression_registry.h              |   71 -
 cpp/src/gandiva/expression_registry_test.cc        |   68 -
 cpp/src/gandiva/field_descriptor.h                 |   69 -
 cpp/src/gandiva/filter.cc                          |  163 -
 cpp/src/gandiva/filter.h                           |  112 -
 cpp/src/gandiva/formatting_utils.h                 |   69 -
 cpp/src/gandiva/func_descriptor.h                  |   50 -
 cpp/src/gandiva/function_holder.h                  |   34 -
 cpp/src/gandiva/function_holder_registry.h         |   73 -
 cpp/src/gandiva/function_ir_builder.cc             |   81 -
 cpp/src/gandiva/function_ir_builder.h              |   61 -
 cpp/src/gandiva/function_registry.cc               |   83 -
 cpp/src/gandiva/function_registry.h                |   47 -
 cpp/src/gandiva/function_registry_arithmetic.cc    |  111 -
 cpp/src/gandiva/function_registry_arithmetic.h     |   27 -
 cpp/src/gandiva/function_registry_common.h         |  263 -
 cpp/src/gandiva/function_registry_datetime.cc      |   94 -
 cpp/src/gandiva/function_registry_datetime.h       |   27 -
 cpp/src/gandiva/function_registry_hash.cc          |   63 -
 cpp/src/gandiva/function_registry_hash.h           |   27 -
 cpp/src/gandiva/function_registry_math_ops.cc      |  106 -
 cpp/src/gandiva/function_registry_math_ops.h       |   27 -
 cpp/src/gandiva/function_registry_string.cc        |  254 -
 cpp/src/gandiva/function_registry_string.h         |   27 -
 cpp/src/gandiva/function_registry_test.cc          |   96 -
 .../function_registry_timestamp_arithmetic.cc      |   84 -
 .../function_registry_timestamp_arithmetic.h       |   27 -
 cpp/src/gandiva/function_signature.cc              |  113 -
 cpp/src/gandiva/function_signature.h               |   55 -
 cpp/src/gandiva/function_signature_test.cc         |  113 -
 cpp/src/gandiva/gandiva.pc.in                      |   27 -
 cpp/src/gandiva/gandiva_aliases.h                  |   62 -
 cpp/src/gandiva/gdv_function_stubs.cc              | 1004 ---
 cpp/src/gandiva/gdv_function_stubs.h               |  111 -
 cpp/src/gandiva/gdv_function_stubs_test.cc         |  293 -
 cpp/src/gandiva/hash_utils.cc                      |  134 -
 cpp/src/gandiva/hash_utils.h                       |   44 -
 cpp/src/gandiva/hash_utils_test.cc                 |  164 -
 cpp/src/gandiva/in_holder.h                        |   91 -
 cpp/src/gandiva/jni/CMakeLists.txt                 |  109 -
 cpp/src/gandiva/jni/config_builder.cc              |   53 -
 cpp/src/gandiva/jni/config_holder.cc               |   30 -
 cpp/src/gandiva/jni/config_holder.h                |   68 -
 cpp/src/gandiva/jni/env_helper.h                   |   23 -
 cpp/src/gandiva/jni/expression_registry_helper.cc  |  190 -
 cpp/src/gandiva/jni/id_to_module_map.h             |   66 -
 cpp/src/gandiva/jni/jni_common.cc                  | 1039 ---
 cpp/src/gandiva/jni/module_holder.h                |   59 -
 cpp/src/gandiva/jni/symbols.map                    |   20 -
 cpp/src/gandiva/like_holder.cc                     |  100 -
 cpp/src/gandiva/like_holder.h                      |   59 -
 cpp/src/gandiva/like_holder_test.cc                |  130 -
 cpp/src/gandiva/literal_holder.cc                  |   45 -
 cpp/src/gandiva/literal_holder.h                   |   36 -
 cpp/src/gandiva/llvm_generator.cc                  | 1392 ----
 cpp/src/gandiva/llvm_generator.h                   |  251 -
 cpp/src/gandiva/llvm_generator_test.cc             |  116 -
 cpp/src/gandiva/llvm_includes.h                    |   43 -
 cpp/src/gandiva/llvm_types.cc                      |   48 -
 cpp/src/gandiva/llvm_types.h                       |  130 -
 cpp/src/gandiva/llvm_types_test.cc                 |   61 -
 cpp/src/gandiva/local_bitmaps_holder.h             |   85 -
 cpp/src/gandiva/lru_cache.h                        |  121 -
 cpp/src/gandiva/lru_cache_test.cc                  |   64 -
 cpp/src/gandiva/lvalue.h                           |   77 -
 cpp/src/gandiva/make_precompiled_bitcode.py        |   49 -
 cpp/src/gandiva/native_function.h                  |   81 -
 cpp/src/gandiva/node.h                             |  299 -
 cpp/src/gandiva/node_visitor.h                     |   53 -
 cpp/src/gandiva/pch.h                              |   24 -
 cpp/src/gandiva/precompiled/CMakeLists.txt         |  143 -
 cpp/src/gandiva/precompiled/arithmetic_ops.cc      |  259 -
 cpp/src/gandiva/precompiled/arithmetic_ops_test.cc |  140 -
 cpp/src/gandiva/precompiled/bitmap.cc              |   60 -
 cpp/src/gandiva/precompiled/bitmap_test.cc         |   62 -
 cpp/src/gandiva/precompiled/decimal_ops.cc         |  723 --
 cpp/src/gandiva/precompiled/decimal_ops.h          |   90 -
 cpp/src/gandiva/precompiled/decimal_ops_test.cc    | 1095 ---
 cpp/src/gandiva/precompiled/decimal_wrapper.cc     |  433 --
 cpp/src/gandiva/precompiled/epoch_time_point.h     |  104 -
 .../gandiva/precompiled/epoch_time_point_test.cc   |  103 -
 cpp/src/gandiva/precompiled/extended_math_ops.cc   |  370 -
 .../gandiva/precompiled/extended_math_ops_test.cc  |  276 -
 cpp/src/gandiva/precompiled/hash.cc                |  407 -
 cpp/src/gandiva/precompiled/hash_test.cc           |  122 -
 cpp/src/gandiva/precompiled/print.cc               |   28 -
 cpp/src/gandiva/precompiled/string_ops.cc          | 1523 ----
 cpp/src/gandiva/precompiled/string_ops_test.cc     | 1091 ---
 cpp/src/gandiva/precompiled/testing.h              |   43 -
 cpp/src/gandiva/precompiled/time.cc                |  831 --
 cpp/src/gandiva/precompiled/time_constants.h       |   30 -
 cpp/src/gandiva/precompiled/time_fields.h          |   35 -
 cpp/src/gandiva/precompiled/time_test.cc           |  746 --
 .../gandiva/precompiled/timestamp_arithmetic.cc    |  242 -
 cpp/src/gandiva/precompiled/types.h                |  457 --
 cpp/src/gandiva/precompiled_bitcode.cc.in          |   26 -
 cpp/src/gandiva/projector.cc                       |  362 -
 cpp/src/gandiva/projector.h                        |  143 -
 cpp/src/gandiva/proto/Types.proto                  |  245 -
 cpp/src/gandiva/random_generator_holder.cc         |   45 -
 cpp/src/gandiva/random_generator_holder.h          |   57 -
 cpp/src/gandiva/random_generator_holder_test.cc    |  103 -
 cpp/src/gandiva/regex_util.cc                      |   63 -
 cpp/src/gandiva/regex_util.h                       |   45 -
 cpp/src/gandiva/selection_vector.cc                |  179 -
 cpp/src/gandiva/selection_vector.h                 |  151 -
 cpp/src/gandiva/selection_vector_impl.h            |  108 -
 cpp/src/gandiva/selection_vector_test.cc           |  270 -
 cpp/src/gandiva/simple_arena.h                     |  160 -
 cpp/src/gandiva/simple_arena_test.cc               |  102 -
 cpp/src/gandiva/symbols.map                        |   35 -
 cpp/src/gandiva/tests/CMakeLists.txt               |   42 -
 cpp/src/gandiva/tests/binary_test.cc               |   89 -
 cpp/src/gandiva/tests/boolean_expr_test.cc         |  388 -
 cpp/src/gandiva/tests/date_time_test.cc            |  590 --
 cpp/src/gandiva/tests/decimal_single_test.cc       |  305 -
 cpp/src/gandiva/tests/decimal_test.cc              | 1194 ---
 cpp/src/gandiva/tests/filter_project_test.cc       |  276 -
 cpp/src/gandiva/tests/filter_test.cc               |  340 -
 cpp/src/gandiva/tests/generate_data.h              |  152 -
 cpp/src/gandiva/tests/hash_test.cc                 |  431 --
 cpp/src/gandiva/tests/huge_table_test.cc           |  157 -
 cpp/src/gandiva/tests/if_expr_test.cc              |  378 -
 cpp/src/gandiva/tests/in_expr_test.cc              |  196 -
 cpp/src/gandiva/tests/literal_test.cc              |  232 -
 cpp/src/gandiva/tests/micro_benchmarks.cc          |  456 --
 cpp/src/gandiva/tests/null_validity_test.cc        |  175 -
 .../tests/projector_build_validation_test.cc       |  287 -
 cpp/src/gandiva/tests/projector_test.cc            | 1013 ---
 cpp/src/gandiva/tests/test_util.h                  |  103 -
 cpp/src/gandiva/tests/timed_evaluate.h             |  136 -
 cpp/src/gandiva/tests/to_string_test.cc            |   88 -
 cpp/src/gandiva/tests/utf8_test.cc                 |  640 --
 cpp/src/gandiva/to_date_holder.cc                  |  116 -
 cpp/src/gandiva/to_date_holder.h                   |   58 -
 cpp/src/gandiva/to_date_holder_test.cc             |  152 -
 cpp/src/gandiva/tree_expr_builder.cc               |  221 -
 cpp/src/gandiva/tree_expr_builder.h                |  130 -
 cpp/src/gandiva/tree_expr_test.cc                  |  159 -
 cpp/src/gandiva/value_validity_pair.h              |   48 -
 cpp/src/gandiva/visibility.h                       |   48 -
 cpp/src/generated/File_generated.h                 |  200 -
 cpp/src/generated/Message_generated.h              |  659 --
 cpp/src/generated/Schema_generated.h               | 2265 ------
 cpp/src/generated/SparseTensor_generated.h         |  913 ---
 cpp/src/generated/Tensor_generated.h               |  387 -
 cpp/src/generated/feather_generated.h              |  863 ---
 cpp/src/generated/parquet_constants.cpp            |   17 -
 cpp/src/generated/parquet_constants.h              |   24 -
 cpp/src/generated/parquet_types.cpp                | 7411 ------------------
 cpp/src/generated/parquet_types.h                  | 2916 -------
 cpp/src/jni/CMakeLists.txt                         |   27 -
 cpp/src/jni/dataset/CMakeLists.txt                 |   65 -
 cpp/src/jni/dataset/jni_util.cc                    |  242 -
 cpp/src/jni/dataset/jni_util.h                     |  135 -
 cpp/src/jni/dataset/jni_util_test.cc               |  134 -
 cpp/src/jni/dataset/jni_wrapper.cc                 |  546 --
 cpp/src/jni/orc/CMakeLists.txt                     |   53 -
 cpp/src/jni/orc/concurrent_map.h                   |   77 -
 cpp/src/jni/orc/jni_wrapper.cpp                    |  311 -
 cpp/src/parquet/CMakeLists.txt                     |  425 --
 cpp/src/parquet/ParquetConfig.cmake.in             |   43 -
 cpp/src/parquet/README                             |   10 -
 cpp/src/parquet/api/CMakeLists.txt                 |   19 -
 cpp/src/parquet/api/io.h                           |   20 -
 cpp/src/parquet/api/reader.h                       |   35 -
 cpp/src/parquet/api/schema.h                       |   21 -
 cpp/src/parquet/api/writer.h                       |   25 -
 cpp/src/parquet/arrow/CMakeLists.txt               |   31 -
 cpp/src/parquet/arrow/arrow_reader_writer_test.cc  | 3945 ----------
 cpp/src/parquet/arrow/arrow_schema_test.cc         | 1563 ----
 cpp/src/parquet/arrow/fuzz.cc                      |   25 -
 cpp/src/parquet/arrow/generate_fuzz_corpus.cc      |  198 -
 cpp/src/parquet/arrow/path_internal.cc             |  900 ---
 cpp/src/parquet/arrow/path_internal.h              |  155 -
 cpp/src/parquet/arrow/path_internal_test.cc        |  597 --
 cpp/src/parquet/arrow/reader.cc                    | 1123 ---
 cpp/src/parquet/arrow/reader.h                     |  324 -
 cpp/src/parquet/arrow/reader_internal.cc           |  778 --
 cpp/src/parquet/arrow/reader_internal.h            |  122 -
 cpp/src/parquet/arrow/reader_writer_benchmark.cc   |  564 --
 cpp/src/parquet/arrow/reconstruct_internal_test.cc | 1639 ----
 cpp/src/parquet/arrow/schema.cc                    | 1053 ---
 cpp/src/parquet/arrow/schema.h                     |  184 -
 cpp/src/parquet/arrow/schema_internal.cc           |  225 -
 cpp/src/parquet/arrow/schema_internal.h            |   48 -
 cpp/src/parquet/arrow/test_util.h                  |  512 --
 cpp/src/parquet/arrow/writer.cc                    |  484 --
 cpp/src/parquet/arrow/writer.h                     |  105 -
 cpp/src/parquet/bloom_filter.cc                    |  162 -
 cpp/src/parquet/bloom_filter.h                     |  247 -
 cpp/src/parquet/bloom_filter_test.cc               |  247 -
 cpp/src/parquet/column_io_benchmark.cc             |  261 -
 cpp/src/parquet/column_page.h                      |  160 -
 cpp/src/parquet/column_reader.cc                   | 1726 -----
 cpp/src/parquet/column_reader.h                    |  333 -
 cpp/src/parquet/column_reader_test.cc              |  390 -
 cpp/src/parquet/column_scanner.cc                  |   91 -
 cpp/src/parquet/column_scanner.h                   |  262 -
 cpp/src/parquet/column_scanner_test.cc             |  235 -
 cpp/src/parquet/column_writer.cc                   | 2067 -----
 cpp/src/parquet/column_writer.h                    |  270 -
 cpp/src/parquet/column_writer_test.cc              | 1019 ---
 cpp/src/parquet/encoding.cc                        | 2527 -------
 cpp/src/parquet/encoding.h                         |  442 --
 cpp/src/parquet/encoding_benchmark.cc              |  802 --
 cpp/src/parquet/encoding_test.cc                   | 1247 ---
 cpp/src/parquet/encryption/CMakeLists.txt          |   19 -
 cpp/src/parquet/encryption/crypto_factory.cc       |  175 -
 cpp/src/parquet/encryption/crypto_factory.h        |  135 -
 cpp/src/parquet/encryption/encryption.cc           |  412 -
 cpp/src/parquet/encryption/encryption.h            |  510 --
 cpp/src/parquet/encryption/encryption_internal.cc  |  613 --
 cpp/src/parquet/encryption/encryption_internal.h   |  116 -
 .../encryption/encryption_internal_nossl.cc        |  110 -
 .../parquet/encryption/file_key_material_store.h   |   31 -
 cpp/src/parquet/encryption/file_key_unwrapper.cc   |  114 -
 cpp/src/parquet/encryption/file_key_unwrapper.h    |   66 -
 cpp/src/parquet/encryption/file_key_wrapper.cc     |  109 -
 cpp/src/parquet/encryption/file_key_wrapper.h      |   82 -
 .../parquet/encryption/internal_file_decryptor.cc  |  240 -
 .../parquet/encryption/internal_file_decryptor.h   |  121 -
 .../parquet/encryption/internal_file_encryptor.cc  |  170 -
 .../parquet/encryption/internal_file_encryptor.h   |  109 -
 cpp/src/parquet/encryption/key_encryption_key.h    |   61 -
 cpp/src/parquet/encryption/key_management_test.cc  |  225 -
 cpp/src/parquet/encryption/key_material.cc         |  159 -
 cpp/src/parquet/encryption/key_material.h          |  131 -
 cpp/src/parquet/encryption/key_metadata.cc         |   89 -
 cpp/src/parquet/encryption/key_metadata.h          |   94 -
 cpp/src/parquet/encryption/key_metadata_test.cc    |   77 -
 cpp/src/parquet/encryption/key_toolkit.cc          |   52 -
 cpp/src/parquet/encryption/key_toolkit.h           |   76 -
 cpp/src/parquet/encryption/key_toolkit_internal.cc |   80 -
 cpp/src/parquet/encryption/key_toolkit_internal.h  |   58 -
 cpp/src/parquet/encryption/key_wrapping_test.cc    |  103 -
 cpp/src/parquet/encryption/kms_client.cc           |   44 -
 cpp/src/parquet/encryption/kms_client.h            |   95 -
 cpp/src/parquet/encryption/kms_client_factory.h    |   40 -
 .../parquet/encryption/local_wrap_kms_client.cc    |  116 -
 cpp/src/parquet/encryption/local_wrap_kms_client.h |   96 -
 cpp/src/parquet/encryption/properties_test.cc      |  276 -
 .../parquet/encryption/read_configurations_test.cc |  272 -
 cpp/src/parquet/encryption/test_encryption_util.cc |  482 --
 cpp/src/parquet/encryption/test_encryption_util.h  |  113 -
 cpp/src/parquet/encryption/test_in_memory_kms.cc   |   81 -
 cpp/src/parquet/encryption/test_in_memory_kms.h    |   89 -
 .../encryption/two_level_cache_with_expiration.h   |  159 -
 .../two_level_cache_with_expiration_test.cc        |  177 -
 .../encryption/write_configurations_test.cc        |  234 -
 cpp/src/parquet/exception.cc                       |   27 -
 cpp/src/parquet/exception.h                        |  159 -
 cpp/src/parquet/file_deserialize_test.cc           |  372 -
 cpp/src/parquet/file_reader.cc                     |  665 --
 cpp/src/parquet/file_reader.h                      |  151 -
 cpp/src/parquet/file_serialize_test.cc             |  471 --
 cpp/src/parquet/file_writer.cc                     |  547 --
 cpp/src/parquet/file_writer.h                      |  234 -
 cpp/src/parquet/hasher.h                           |   72 -
 cpp/src/parquet/level_comparison.cc                |   82 -
 cpp/src/parquet/level_comparison.h                 |   40 -
 cpp/src/parquet/level_comparison_avx2.cc           |   34 -
 cpp/src/parquet/level_comparison_inc.h             |   65 -
 cpp/src/parquet/level_conversion.cc                |  183 -
 cpp/src/parquet/level_conversion.h                 |  199 -
 cpp/src/parquet/level_conversion_benchmark.cc      |   80 -
 cpp/src/parquet/level_conversion_bmi2.cc           |   33 -
 cpp/src/parquet/level_conversion_inc.h             |  357 -
 cpp/src/parquet/level_conversion_test.cc           |  361 -
 cpp/src/parquet/metadata.cc                        | 1783 -----
 cpp/src/parquet/metadata.h                         |  484 --
 cpp/src/parquet/metadata_test.cc                   |  558 --
 cpp/src/parquet/murmur3.cc                         |  222 -
 cpp/src/parquet/murmur3.h                          |   54 -
 cpp/src/parquet/parquet.pc.in                      |   31 -
 cpp/src/parquet/parquet.thrift                     | 1058 ---
 cpp/src/parquet/parquet_version.h.in               |   31 -
 cpp/src/parquet/pch.h                              |   28 -
 cpp/src/parquet/platform.cc                        |   41 -
 cpp/src/parquet/platform.h                         |  111 -
 cpp/src/parquet/printer.cc                         |  297 -
 cpp/src/parquet/printer.h                          |   46 -
 cpp/src/parquet/properties.cc                      |   64 -
 cpp/src/parquet/properties.h                       |  801 --
 cpp/src/parquet/properties_test.cc                 |   90 -
 cpp/src/parquet/public_api_test.cc                 |   49 -
 cpp/src/parquet/reader_test.cc                     |  629 --
 cpp/src/parquet/schema.cc                          |  946 ---
 cpp/src/parquet/schema.h                           |  496 --
 cpp/src/parquet/schema_internal.h                  |   54 -
 cpp/src/parquet/schema_test.cc                     | 2226 ------
 cpp/src/parquet/statistics.cc                      |  848 ---
 cpp/src/parquet/statistics.h                       |  342 -
 cpp/src/parquet/statistics_test.cc                 | 1140 ---
 cpp/src/parquet/stream_reader.cc                   |  521 --
 cpp/src/parquet/stream_reader.h                    |  299 -
 cpp/src/parquet/stream_reader_test.cc              |  916 ---
 cpp/src/parquet/stream_writer.cc                   |  324 -
 cpp/src/parquet/stream_writer.h                    |  243 -
 cpp/src/parquet/stream_writer_test.cc              |  419 --
 cpp/src/parquet/symbols.map                        |   40 -
 cpp/src/parquet/test_util.cc                       |  136 -
 cpp/src/parquet/test_util.h                        |  709 --
 cpp/src/parquet/thrift_internal.h                  |  490 --
 cpp/src/parquet/type_fwd.h                         |   43 -
 cpp/src/parquet/types.cc                           | 1572 ----
 cpp/src/parquet/types.h                            |  716 --
 cpp/src/parquet/types_test.cc                      |  172 -
 cpp/src/parquet/windows_compatibility.h            |   30 -
 cpp/src/plasma/.gitignore                          |   18 -
 cpp/src/plasma/CMakeLists.txt                      |  238 -
 cpp/src/plasma/PlasmaConfig.cmake.in               |   39 -
 cpp/src/plasma/client.cc                           | 1224 ---
 cpp/src/plasma/client.h                            |  309 -
 cpp/src/plasma/common.cc                           |  195 -
 cpp/src/plasma/common.fbs                          |   39 -
 cpp/src/plasma/common.h                            |  155 -
 cpp/src/plasma/common_generated.h                  |  230 -
 cpp/src/plasma/compat.h                            |   32 -
 cpp/src/plasma/dlmalloc.cc                         |  166 -
 cpp/src/plasma/events.cc                           |  107 -
 cpp/src/plasma/events.h                            |  108 -
 cpp/src/plasma/eviction_policy.cc                  |  175 -
 cpp/src/plasma/eviction_policy.h                   |  209 -
 cpp/src/plasma/external_store.cc                   |   63 -
 cpp/src/plasma/external_store.h                    |  120 -
 cpp/src/plasma/fling.cc                            |  129 -
 cpp/src/plasma/fling.h                             |   52 -
 cpp/src/plasma/hash_table_store.cc                 |   58 -
 cpp/src/plasma/hash_table_store.h                  |   50 -
 cpp/src/plasma/io.cc                               |  250 -
 cpp/src/plasma/io.h                                |   67 -
 .../org_apache_arrow_plasma_PlasmaClientJNI.cc     |  263 -
 .../java/org_apache_arrow_plasma_PlasmaClientJNI.h |  141 -
 cpp/src/plasma/malloc.cc                           |   70 -
 cpp/src/plasma/malloc.h                            |   51 -
 cpp/src/plasma/plasma.cc                           |   99 -
 cpp/src/plasma/plasma.fbs                          |  357 -
 cpp/src/plasma/plasma.h                            |  175 -
 cpp/src/plasma/plasma.pc.in                        |   33 -
 cpp/src/plasma/plasma_allocator.cc                 |   56 -
 cpp/src/plasma/plasma_allocator.h                  |   61 -
 cpp/src/plasma/plasma_generated.h                  | 3984 ----------
 cpp/src/plasma/protocol.cc                         |  829 --
 cpp/src/plasma/protocol.h                          |  251 -
 cpp/src/plasma/quota_aware_policy.cc               |  177 -
 cpp/src/plasma/quota_aware_policy.h                |   88 -
 cpp/src/plasma/store.cc                            | 1353 ----
 cpp/src/plasma/store.h                             |  245 -
 cpp/src/plasma/symbols.map                         |   34 -
 cpp/src/plasma/test/client_tests.cc                | 1084 ---
 cpp/src/plasma/test/external_store_tests.cc        |  143 -
 cpp/src/plasma/test/serialization_tests.cc         |  333 -
 cpp/src/plasma/test_util.h                         |   46 -
 cpp/src/plasma/thirdparty/ae/ae.c                  |  465 --
 cpp/src/plasma/thirdparty/ae/ae.h                  |  121 -
 cpp/src/plasma/thirdparty/ae/ae_epoll.c            |  137 -
 cpp/src/plasma/thirdparty/ae/ae_evport.c           |  320 -
 cpp/src/plasma/thirdparty/ae/ae_kqueue.c           |  138 -
 cpp/src/plasma/thirdparty/ae/ae_select.c           |  106 -
 cpp/src/plasma/thirdparty/ae/config.h              |   52 -
 cpp/src/plasma/thirdparty/ae/zmalloc.h             |   43 -
 cpp/src/plasma/thirdparty/dlmalloc.c               | 6296 ----------------
 cpp/submodules/parquet-testing                     |    1 -
 cpp/thirdparty/README.md                           |   25 -
 cpp/thirdparty/download_dependencies.sh            |   63 -
 .../flatbuffers/include/flatbuffers/base.h         |  398 -
 .../flatbuffers/include/flatbuffers/flatbuffers.h  | 2783 -------
 .../include/flatbuffers/stl_emulation.h            |  307 -
 cpp/thirdparty/hadoop/include/hdfs.h               | 1024 ---
 cpp/thirdparty/versions.txt                        |   92 -
 cpp/tools/parquet/CMakeLists.txt                   |   36 -
 cpp/tools/parquet/parquet_dump_schema.cc           |   52 -
 cpp/tools/parquet/parquet_reader.cc                |   82 -
 cpp/tools/parquet/parquet_scan.cc                  |   78 -
 cpp/valgrind.supp                                  |   53 -
 cpp/vcpkg.json                                     |   40 -
 1436 files changed, 490175 deletions(-)

diff --git a/cpp/.gitignore b/cpp/.gitignore
deleted file mode 100644
index 03c03a4..0000000
--- a/cpp/.gitignore
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-thirdparty/*.tar*
-CMakeFiles/
-CMakeCache.txt
-CTestTestfile.cmake
-Makefile
-cmake_install.cmake
-build/
-*-build/
-Testing/
-build-support/boost_*
-
-# Build directories created by Clion
-cmake-build-*/
-
-#########################################
-# Editor temporary/working/backup files #
-.#*
-*\#*\#
-[#]*#
-*~
-*$
-*.bak
-*flymake*
-*.kdev4
-*.log
-*.swp
diff --git a/cpp/Brewfile b/cpp/Brewfile
deleted file mode 100644
index 7de6c7d..0000000
--- a/cpp/Brewfile
+++ /dev/null
@@ -1,44 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-brew "automake"
-brew "boost"
-brew "brotli"
-brew "c-ares"
-brew "ccache"
-brew "cmake"
-brew "flatbuffers"
-brew "git"
-brew "glog"
-brew "grpc"
-brew "llvm"
-brew "llvm@8"
-brew "lz4"
-brew "minio"
-brew "ninja"
-brew "numpy"
-brew "openssl@1.1"
-brew "protobuf"
-brew "python"
-brew "rapidjson"
-# grpc bundles re2 and causes a conflict when Homebrew tries to install it,
-# so temporarily skip installing re2. See ARROW-9972.
-# brew "re2"
-brew "snappy"
-brew "thrift"
-brew "wget"
-brew "zstd"
diff --git a/cpp/CHANGELOG_PARQUET.md b/cpp/CHANGELOG_PARQUET.md
deleted file mode 100644
index 06a09c2..0000000
--- a/cpp/CHANGELOG_PARQUET.md
+++ /dev/null
@@ -1,501 +0,0 @@
-Parquet C++ 1.5.0
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-979] - [C++] Limit size of min, max or disable stats for long binary types
-    * [PARQUET-1071] - [C++] parquet::arrow::FileWriter::Close is not idempotent
-    * [PARQUET-1349] - [C++] PARQUET_RPATH_ORIGIN is not picked by the build
-    * [PARQUET-1334] - [C++] memory_map parameter seems misleading in parquet file opener
-    * [PARQUET-1333] - [C++] Reading of files with dictionary size 0 fails on Windows with bad_alloc
-    * [PARQUET-1283] - [C++] FormatStatValue appends trailing space to string and int96
-    * [PARQUET-1270] - [C++] Executable tools do not get installed
-    * [PARQUET-1272] - [C++] ScanFileContents reports wrong row count for nested columns
-    * [PARQUET-1268] - [C++] Conversion of Arrow null list columns fails
-    * [PARQUET-1255] - [C++] Exceptions thrown in some tests
-    * [PARQUET-1358] - [C++] index_page_offset should be unset as it is not supported.
-    * [PARQUET-1357] - [C++] FormatStatValue truncates binary statistics on zero character
-    * [PARQUET-1319] - [C++] Pass BISON_EXECUTABLE to Thrift EP for MacOS
-    * [PARQUET-1313] - [C++] Compilation failure with VS2017
-    * [PARQUET-1315] - [C++] ColumnChunkMetaData.has_dictionary_page() should return bool, not int64_t
-    * [PARQUET-1307] - [C++] memory-test fails with latest Arrow
-    * [PARQUET-1274] - [Python] SegFault in pyarrow.parquet.write_table with specific options
-    * [PARQUET-1209] - locally defined symbol ... imported in function ..
-    * [PARQUET-1245] - [C++] Segfault when writing Arrow table with duplicate columns
-    * [PARQUET-1273] - [Python] Error writing to partitioned Parquet dataset
-    * [PARQUET-1384] - [C++] Clang compiler warnings in bloom_filter-test.cc
-
-## Improvement
-    * [PARQUET-1348] - [C++] Allow Arrow FileWriter To Write FileMetaData
-    * [PARQUET-1346] - [C++] Protect against null values data in empty Arrow array
-    * [PARQUET-1340] - [C++] Fix Travis Ci valgrind errors related to std::random_device
-    * [PARQUET-1323] - [C++] Fix compiler warnings with clang-6.0
-    * [PARQUET-1279] - Use ASSERT_NO_FATAL_FAILURE in C++ unit tests
-    * [PARQUET-1262] - [C++] Use the same BOOST_ROOT and Boost_NAMESPACE for Thrift
-    * [PARQUET-1267] - Replace "unsafe" std::equal with std::memcmp
-    * [PARQUET-1360] - [C++] Minor API + style changes follow up to PARQUET-1348
-    * [PARQUET-1166] - [API Proposal] Add GetRecordBatchReader in parquet/arrow/reader.h
-    * [PARQUET-1378] - [C++] Allow RowGroups with zero rows to be written
-    * [PARQUET-1256] - [C++] Add --print-key-value-metadata option to parquet_reader tool
-    * [PARQUET-1276] - [C++] Reduce the amount of memory used for writing null decimal values
-
-## New Feature
-    * [PARQUET-1392] - [C++] Supply row group indices to parquet::arrow::FileReader::ReadTable
-
-## Sub-task
-    * [PARQUET-1227] - Thrift crypto metadata structures
-    * [PARQUET-1332] - [C++] Add bloom filter utility class
-
-## Task
-    * [PARQUET-1350] - [C++] Use abstract ResizableBuffer instead of concrete PoolBuffer
-    * [PARQUET-1366] - [C++] Streamline use of Arrow bit-util.h
-    * [PARQUET-1308] - [C++] parquet::arrow should use thread pool, not ParallelFor
-    * [PARQUET-1382] - [C++] Prepare for arrow::test namespace removal
-    * [PARQUET-1372] - [C++] Add an API to allow writing RowGroups based on their size rather than num_rows
-
-
-Parquet C++ 1.4.0
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-1193] - [CPP] Implement ColumnOrder to support min_value and max_value
-    * [PARQUET-1180] - C++: Fix behaviour of num_children element of primitive nodes
-    * [PARQUET-1146] - C++: Add macOS-compatible sha512sum call to release verify script
-    * [PARQUET-1167] - [C++] FieldToNode function should return a status when throwing an exception
-    * [PARQUET-1175] - [C++] Fix usage of deprecated Arrow API
-    * [PARQUET-1113] - [C++] Incorporate fix from ARROW-1601 on bitmap read path
-    * [PARQUET-1111] - dev/release/verify-release-candidate has stale help
-    * [PARQUET-1109] - C++: Update release verification script to SHA512
-    * [PARQUET-1179] - [C++] Support Apache Thrift 0.11
-    * [PARQUET-1226] - [C++] Fix new build warnings with clang 5.0
-    * [PARQUET-1233] - [CPP] Enable option to switch between stl classes and boost classes for thrift header
-    * [PARQUET-1205] - Fix msvc static build
-    * [PARQUET-1210] - [C++] Boost 1.66 compilation fails on Windows on linkage stage
-
-## Improvement
-    * [PARQUET-1092] - [C++] Write Arrow tables with chunked columns
-    * [PARQUET-1086] - [C++] Remove usage of arrow/util/compiler-util.h after 1.3.0 release
-    * [PARQUET-1097] - [C++] Account for Arrow API deprecation in ARROW-1511
-    * [PARQUET-1150] - C++: Hide statically linked boost symbols
-    * [PARQUET-1151] - [C++] Add build options / configuration to use static runtime libraries with MSVC
-    * [PARQUET-1147] - [C++] Account for API deprecation / change in ARROW-1671
-    * [PARQUET-1162] - C++: Update dev/README after migration to Gitbox
-    * [PARQUET-1165] - [C++] Pin clang-format version to 4.0
-    * [PARQUET-1164] - [C++] Follow API changes in ARROW-1808
-    * [PARQUET-1177] - [C++] Add more extensive compiler warnings when using Clang
-    * [PARQUET-1110] - [C++] Release verification script for Windows
-    * [PARQUET-859] - [C++] Flatten parquet/file directory
-    * [PARQUET-1220] - [C++] Don't build Thrift examples and tutorials in the ExternalProject
-    * [PARQUET-1219] - [C++] Update release-candidate script links to gitbox
-    * [PARQUET-1196] - [C++] Provide a parquet_arrow example project incl. CMake setup
-    * [PARQUET-1200] - [C++] Support reading a single Arrow column from a Parquet file
-
-## New Feature
-    * [PARQUET-1095] - [C++] Read and write Arrow decimal values
-    * [PARQUET-970] - Add Lz4 and Zstd compression codecs
-
-## Task
-    * [PARQUET-1221] - [C++] Extend release README
-    * [PARQUET-1225] - NaN values may lead to incorrect filtering under certain circumstances
-
-
-Parquet C++ 1.3.1
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-1105] - [CPP] Remove libboost_system dependency
-    * [PARQUET-1138] - [C++] Fix compilation with Arrow 0.7.1
-    * [PARQUET-1123] - [C++] Update parquet-cpp to use Arrow's AssertArraysEqual
-    * [PARQUET-1121] - C++: DictionaryArrays of NullType cannot be written
-    * [PARQUET-1139] - Add license to cmake_modules/parquet-cppConfig.cmake.in
-
-## Improvement
-    * [PARQUET-1140] - [C++] Fail on RAT errors in CI
-    * [PARQUET-1070] - Add CPack support to the build
-
-
-Parquet C++ 1.3.0
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-1098] - [C++] Install new header in parquet/util
-    * [PARQUET-1085] - [C++] Backwards compatibility from macro cleanup in transitive dependencies in ARROW-1452
-    * [PARQUET-1074] - [C++] Switch to long key ids in KEYs file
-    * [PARQUET-1075] - C++: Coverage upload is broken
-    * [PARQUET-1088] - [CPP] remove parquet_version.h from version control since it gets auto generated
-    * [PARQUET-1002] - [C++] Compute statistics based on Logical Types
-    * [PARQUET-1100] - [C++] Reading repeated types should decode number of records rather than number of values
-    * [PARQUET-1090] - [C++] Fix int32 overflow in Arrow table writer, add max row group size property
-    * [PARQUET-1108] - [C++] Fix Int96 comparators
-
-## Improvement
-    * [PARQUET-1104] - [C++] Upgrade to Apache Arrow 0.7.0 RC0
-    * [PARQUET-1072] - [C++] Add ARROW_NO_DEPRECATED_API to CI to check for deprecated API use
-    * [PARQUET-1096] - C++: Update sha{1, 256, 512} checksums per latest ASF release policy
-    * [PARQUET-1079] - [C++] Account for Arrow API change in ARROW-1335
-    * [PARQUET-1087] - [C++] Add wrapper for ScanFileContents in parquet::arrow that catches exceptions
-    * [PARQUET-1093] - C++: Improve Arrow level generation error message
-    * [PARQUET-1094] - C++: Add benchmark for boolean Arrow column I/O
-    * [PARQUET-1083] - [C++] Refactor core logic in parquet-scan.cc so that it can be used as a library function for benchmarking
-    * [PARQUET-1037] - Allow final RowGroup to be unfilled
-
-## New Feature
-    * [PARQUET-1078] - [C++] Add Arrow writer option to coerce timestamps to milliseconds or microseconds
-    * [PARQUET-929] - [C++] Handle arrow::DictionaryArray when writing Arrow data
-
-
-Parquet C++ 1.2.0
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-1029] - [C++] TypedColumnReader/TypeColumnWriter symbols are no longer being exported
-    * [PARQUET-997] - Fix override compiler warnings
-    * [PARQUET-1033] - Mismatched Read and Write
-    * [PARQUET-1007] - [C++] Update parquet.thrift from https://github.com/apache/parquet-format
-    * [PARQUET-1039] - PARQUET-911 Breaks Arrow
-    * [PARQUET-1038] - Key value metadata should be nullptr if not set
-    * [PARQUET-1018] - [C++] parquet.dll has runtime dependencies on one or more libraries in the build toolchain
-    * [PARQUET-1003] - [C++] Modify DEFAULT_CREATED_BY value for every new release version
-    * [PARQUET-1004] - CPP build fails on Windows
-    * [PARQUET-1040] - Missing writer method implementations
-    * [PARQUET-1054] - [C++] Account for Arrow API changes in ARROW-1199
-    * [PARQUET-1042] - C++: Compilation breaks on GCC 4.8
-    * [PARQUET-1048] - [C++] Static linking of libarrow is no longer supported
-    * [PARQUET-1013] - Fix ZLIB_INCLUDE_DIR
-    * [PARQUET-998] - C++: Release script is not usable
-    * [PARQUET-1023] - [C++] Brotli libraries are not being statically linked on Windows
-    * [PARQUET-1000] - [C++] Do not build thirdparty Arrow with /WX on MSVC
-    * [PARQUET-1052] - [C++] add_compiler_export_flags() throws warning with CMake >= 3.3
-    * [PARQUET-1069] - C++: ./dev/release/verify-release-candidate is broken due to missing Arrow dependencies
-
-## Improvement
-    * [PARQUET-996] - Improve MSVC build - ThirdpartyToolchain - Arrow
-    * [PARQUET-911] - C++: Support nested structs in parquet_arrow
-    * [PARQUET-986] - Improve MSVC build - ThirdpartyToolchain - Thrift
-    * [PARQUET-864] - [C++] Consolidate non-Parquet-specific bit utility code into Apache Arrow
-    * [PARQUET-1043] - [C++] Raise minimum supported CMake version to 3.2
-    * [PARQUET-1016] - Upgrade thirdparty Arrow to 0.4.0
-    * [PARQUET-858] - [C++] Flatten parquet/column directory, consolidate related code
-    * [PARQUET-978] - [C++] Minimizing footer reads for small(ish) metadata
-    * [PARQUET-991] - [C++] Fix compiler warnings on MSVC and build with /WX in Appveyor
-    * [PARQUET-863] - [C++] Move SIMD, CPU info, hashing, and other generic utilities into Apache Arrow
-    * [PARQUET-1053] - Fix unused result warnings due to unchecked Statuses
-    * [PARQUET-1067] - C++: Update arrow hash to 0.5.0
-    * [PARQUET-1041] - C++: Support Arrow's NullArray
-    * [PARQUET-1008] - Update TypedColumnReader::ReadBatch method to accept batch_size as int64_t
-    * [PARQUET-1044] - [C++] Use compression libraries from Apache Arrow
-    * [PARQUET-999] - Improve MSVC build - Enable PARQUET_BUILD_BENCHMARKS
-    * [PARQUET-967] - [C++] Combine libparquet/libparquet_arrow libraries
-    * [PARQUET-1045] - [C++] Refactor to account for computational utility code migration in ARROW-1154
-
-## New Feature
-    * [PARQUET-1035] - Write Int96 from Arrow Timestamp(ns)
-
-## Task
-    * [PARQUET-994] - C++: release-candidate script should not push to master
-    * [PARQUET-902] - [C++] Move compressor interfaces into Apache Arrow
-
-## Test
-    * [PARQUET-706] - [C++] Create test case that uses libparquet as a 3rd party library
-
-
-Parquet C++ 1.1.0
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-898] - [C++] Change Travis CI OS X image to Xcode 6.4 and fix our thirdparty build
-    * [PARQUET-976] - [C++] Pass unit test suite with MSVC, build in Appveyor
-    * [PARQUET-963] - [C++] Disallow reading struct types in Arrow reader for now
-    * [PARQUET-959] - [C++] Arrow thirdparty build fails on multiarch systems
-    * [PARQUET-962] - [C++] GTEST_MAIN_STATIC_LIB is not defined in FindGTest.cmake
-    * [PARQUET-958] - [C++] Print Parquet metadata in JSON format
-    * [PARQUET-956] - C++: BUILD_BYPRODUCTS not specified anymore for gtest
-    * [PARQUET-948] - [C++] Account for API changes in ARROW-782
-    * [PARQUET-947] - [C++] Refactor to account for ARROW-795 Arrow core library consolidation
-    * [PARQUET-965] - [C++] FIXED_LEN_BYTE_ARRAY types are unhandled in the Arrow reader
-    * [PARQUET-949] - [C++] Arrow version pinning seems to not be working properly
-    * [PARQUET-955] - [C++] pkg_check_modules will override $ARROW_HOME if it is set in the environment
-    * [PARQUET-945] - [C++] Thrift static libraries are not used with recent patch
-    * [PARQUET-943] - [C++] Overflow build error on x86
-    * [PARQUET-938] - [C++] There is a typo in cmake_modules/FindSnappy.cmake comment
-    * [PARQUET-936] - [C++] parquet::arrow::WriteTable can enter infinite loop if chunk_size is 0
-    * [PARQUET-981] - Repair usage of *_HOME 3rd-party dependency environment variables during Windows build
-    * [PARQUET-992] - [C++] parquet/compression.h leaks zlib.h
-    * [PARQUET-987] - [C++] Fix regressions caused by PARQUET-981
-    * [PARQUET-933] - [C++] Account for Arrow Table API changes coming in ARROW-728
-    * [PARQUET-915] - Support Arrow Time Types in Schema
-    * [PARQUET-914] - [C++] Throw more informative exception when user writes too many values to a column in a row group
-    * [PARQUET-923] - [C++] Account for Time metadata changes in ARROW-686
-    * [PARQUET-918] - FromParquetSchema API crashes on nested schemas
-    * [PARQUET-925] - [C++] FindArrow.cmake sets the wrong library path after ARROW-648
-    * [PARQUET-932] - [C++] Add option to build parquet library with minimal dependencies
-    * [PARQUET-919] - [C++] Account for API changes in ARROW-683
-    * [PARQUET-995] - [C++] Int96 reader in parquet_arrow uses size of Int96Type instead of Int96
-
-## Improvement
-    * [PARQUET-508] - Add ParquetFilePrinter
-    * [PARQUET-595] - Add API for key-value metadata
-    * [PARQUET-897] - [C++] Only use designated public headers from libarrow
-    * [PARQUET-679] - [C++] Build and unit tests support for MSVC on Windows
-    * [PARQUET-977] - Improve MSVC build
-    * [PARQUET-957] - [C++] Add optional $PARQUET_BUILD_TOOLCHAIN environment variable option for configuring build environment
-    * [PARQUET-961] - [C++] Strip debug symbols from libparquet libraries in release builds by default
-    * [PARQUET-954] - C++: Use Brotli 0.6 release
-    * [PARQUET-953] - [C++] Change arrow::FileWriter API to be initialized from a Schema, and provide for writing multiple tables
-    * [PARQUET-941] - [C++] Stop needless Boost static library detection for CentOS 7 support
-    * [PARQUET-942] - [C++] Fix wrong variable use in FindSnappy
-    * [PARQUET-939] - [C++] Support Thrift_HOME CMake variable like FindSnappy does as Snappy_HOME
-    * [PARQUET-940] - [C++] Fix Arrow library path detection
-    * [PARQUET-937] - [C++] Support CMake < 3.4 again for Arrow detection
-    * [PARQUET-935] - [C++] Set shared library version for .deb packages
-    * [PARQUET-934] - [C++] Support multiarch on Debian
-    * [PARQUET-984] - C++: Add abi and so version to pkg-config
-    * [PARQUET-983] - C++: Update Thirdparty hash to Arrow 0.3.0
-    * [PARQUET-989] - [C++] Link dynamically to libarrow in toolchain build, set LD_LIBRARY_PATH
-    * [PARQUET-988] - [C++] Add Linux toolchain-based build to Travis CI
-    * [PARQUET-928] - [C++] Support pkg-config
-    * [PARQUET-927] - [C++] Specify shared library version of Apache Arrow
-    * [PARQUET-931] - [C++] Add option to pin thirdparty Arrow version used in ExternalProject
-    * [PARQUET-926] - [C++] Use pkg-config to find Apache Arrow
-    * [PARQUET-917] - C++: Build parquet_arrow by default
-    * [PARQUET-910] - C++: Support TIME logical type in parquet_arrow
-    * [PARQUET-909] - [CPP]: Reduce buffer allocations (mallocs) on critical path
-
-## New Feature
-    * [PARQUET-853] - [C++] Add option to link with shared boost libraries when building Arrow in the thirdparty toolchain
-    * [PARQUET-946] - [C++] Refactoring in parquet::arrow::FileReader to be able to read a single row group
-    * [PARQUET-930] - [C++] Account for all Arrow date/time types
-
-
-Parquet C++ 1.0.0
---------------------------------------------------------------------------------
-## Bug
-    * [PARQUET-455] - Fix compiler warnings on OS X / Clang
-    * [PARQUET-558] - Support ZSH in build scripts
-    * [PARQUET-720] - Parquet-cpp fails to link when included in multiple TUs
-    * [PARQUET-718] - Reading boolean pages written by parquet-cpp fails
-    * [PARQUET-640] - [C++] Force the use of gcc 4.9 in conda builds
-    * [PARQUET-643] - Add const modifier to schema pointer reference in ParquetFileWriter
-    * [PARQUET-672] - [C++] Build testing conda artifacts in debug mode
-    * [PARQUET-661] - [C++] Do not assume that perl is found in /usr/bin
-    * [PARQUET-659] - [C++] Instantiated template visibility is broken on clang / OS X
-    * [PARQUET-657] - [C++] Don't define DISALLOW_COPY_AND_ASSIGN if already defined
-    * [PARQUET-656] - [C++] Revert PARQUET-653
-    * [PARQUET-676] - MAX_VALUES_PER_LITERAL_RUN causes RLE encoding failure
-    * [PARQUET-614] - C++: Remove unneeded LZ4-related code
-    * [PARQUET-604] - Install writer.h headers
-    * [PARQUET-621] - C++: Uninitialised DecimalMetadata is read
-    * [PARQUET-620] - C++: Duplicate calls to ParquetFileWriter::Close cause duplicate metadata writes
-    * [PARQUET-599] - ColumnWriter::RleEncodeLevels' size estimation might be wrong
-    * [PARQUET-617] - C++: Enable conda build to work on systems with non-default C++ toolchains
-    * [PARQUET-627] - Ensure that thrift headers are generated before source compilation
-    * [PARQUET-745] - TypedRowGroupStatistics fails to PlainDecode min and max in ByteArrayType
-    * [PARQUET-738] - Update arrow version that also supports newer Xcode
-    * [PARQUET-747] - [C++] TypedRowGroupStatistics are not being exported in libparquet.so
-    * [PARQUET-711] - Use metadata builders in parquet writer
-    * [PARQUET-732] - Building a subset of dependencies does not work
-    * [PARQUET-760] - On switching from dictionary to the fallback encoding, an incorrect encoding is set
-    * [PARQUET-691] - [C++] Write ColumnChunk metadata after each column chunk in the file
-    * [PARQUET-797] - [C++] Update for API changes in ARROW-418
-    * [PARQUET-837] - [C++] SerializedFile::ParseMetaData uses Seek, followed by Read, and could have race conditions
-    * [PARQUET-827] - [C++] Incorporate addition of arrow::MemoryPool::Reallocate
-    * [PARQUET-502] - Scanner segfaults when its batch size is smaller than the number of rows
-    * [PARQUET-469] - Roll back Thrift bindings to 0.9.0
-    * [PARQUET-889] - Fix compilation when PARQUET_USE_SSE is on
-    * [PARQUET-888] - C++ Memory leak in RowGroupSerializer
-    * [PARQUET-819] - C++: Trying to install non-existing parquet/arrow/utils.h
-    * [PARQUET-736] - XCode 8.0 breaks builds
-    * [PARQUET-505] - Column reader: automatically handle large data pages
-    * [PARQUET-615] - C++: Building static or shared libparquet should not be mutually exclusive
-    * [PARQUET-658] - ColumnReader has no virtual destructor
-    * [PARQUET-799] - Concurrent usage of the file reader API
-    * [PARQUET-513] - Valgrind errors are not failing the Travis CI build
-    * [PARQUET-841] - [C++] Writing wrong format version when using ParquetVersion::PARQUET_1_0
-    * [PARQUET-742] - Add missing license headers
-    * [PARQUET-741] - compression_buffer_ is reused although it shouldn't be
-    * [PARQUET-700] - C++: Disable dictionary encoding for boolean columns
-    * [PARQUET-662] - [C++] ParquetException must be explicitly exported in dynamic libraries
-    * [PARQUET-704] - [C++] scan-all.h is not being installed
-    * [PARQUET-865] - C++: Pass all CXXFLAGS to Thrift ExternalProject
-    * [PARQUET-875] - [C++] Fix coveralls build given changes to thirdparty build procedure
-    * [PARQUET-709] - [C++] Fix conda dev binary builds
-    * [PARQUET-638] - [C++] Revert static linking of libstdc++ in conda builds until symbol visibility addressed
-    * [PARQUET-606] - Travis coverage is broken
-    * [PARQUET-880] - [CPP] Prevent destructors from throwing
-    * [PARQUET-886] - [C++] Revise build documentation and requirements in README.md
-    * [PARQUET-900] - C++: Fix NOTICE / LICENSE issues
-    * [PARQUET-885] - [C++] Do not search for Thrift in default system paths
-    * [PARQUET-879] - C++: ExternalProject compilation for Thrift fails on older CMake versions
-    * [PARQUET-635] - [C++] Statically link libstdc++ on Linux in conda recipe
-    * [PARQUET-710] - Remove unneeded private member variables from RowGroupReader ABI
-    * [PARQUET-766] - C++: Expose ParquetFileReader through Arrow reader as const
-    * [PARQUET-876] - C++: Correct snapshot version
-    * [PARQUET-821] - [C++] zlib download link is broken
-    * [PARQUET-818] - [C++] Refactor library to share IO, Buffer, and memory management abstractions with Apache Arrow
-    * [PARQUET-537] - LocalFileSource leaks resources
-    * [PARQUET-764] - [CPP] Parquet Writer does not write Boolean values correctly
-    * [PARQUET-812] - [C++] Failure reading BYTE_ARRAY data from file in parquet-compatibility project
-    * [PARQUET-759] - Cannot store columns consisting of empty strings
-    * [PARQUET-846] - [CPP] CpuInfo::Init() is not thread safe
-    * [PARQUET-694] - C++: Revert default data page size back to 1M
-    * [PARQUET-842] - [C++] Impala rejects DOUBLE columns if decimal metadata is set
-    * [PARQUET-708] - [C++] RleEncoder does not account for "worst case scenario" in MaxBufferSize for bit_width > 1
-    * [PARQUET-639] - Do not export DCHECK in public headers
-    * [PARQUET-828] - [C++] "version" field set improperly in file metadata
-    * [PARQUET-891] - [C++] Do not search for Snappy in default system paths
-    * [PARQUET-626] - Fix builds due to unavailable llvm.org apt mirror
-    * [PARQUET-629] - RowGroupSerializer should only close itself once
-    * [PARQUET-472] - Clean up InputStream ownership semantics in ColumnReader
-    * [PARQUET-739] - RLE decoding uses a static buffer that is shared across threads
-    * [PARQUET-561] - ParquetFileReader::Contents PIMPL missing a virtual destructor
-    * [PARQUET-892] - [C++] Clean up link library targets in CMake files
-    * [PARQUET-454] - Address inconsistencies in boolean decoding
-    * [PARQUET-816] - [C++] Failure decoding sample dict-encoded file from parquet-compatibility project
-    * [PARQUET-565] - Use PATH instead of DIRECTORY in get_filename_component to support CMake<2.8.12
-    * [PARQUET-446] - Hide thrift dependency in parquet-cpp
-    * [PARQUET-843] - [C++] Impala unable to read files created by parquet-cpp
-    * [PARQUET-555] - Dictionary page metadata handling inconsistencies
-    * [PARQUET-908] - Fix for PARQUET-890 introduces undefined symbol in libparquet_arrow.so
-    * [PARQUET-793] - [CPP] Do not return incorrect statistics
-    * [PARQUET-887] - C++: Fix issues in release scripts that arose in RC1
-
-## Improvement
-    * [PARQUET-277] - Remove boost dependency
-    * [PARQUET-500] - Enable coveralls.io for apache/parquet-cpp
-    * [PARQUET-497] - Decouple Parquet physical file structure from FileReader class
-    * [PARQUET-597] - Add data rates to benchmark output
-    * [PARQUET-522] - #include cleanup with include-what-you-use
-    * [PARQUET-515] - Add "Reset" to LevelEncoder and LevelDecoder
-    * [PARQUET-514] - Automate coveralls.io updates in Travis CI
-    * [PARQUET-551] - Handle compiler warnings due to disabled DCHECKs in release builds
-    * [PARQUET-559] - Enable InputStream as a source to the ParquetFileReader
-    * [PARQUET-562] - Simplified ZSH support in build scripts
-    * [PARQUET-538] - Improve ColumnReader Tests
-    * [PARQUET-541] - Portable build scripts
-    * [PARQUET-724] - Test more advanced properties setting
-    * [PARQUET-641] - Instantiate stringstream only if needed in SerializedPageReader::NextPage
-    * [PARQUET-636] - Expose selection for different encodings
-    * [PARQUET-603] - Implement missing information in schema descriptor
-    * [PARQUET-610] - Print ColumnMetaData for each RowGroup
-    * [PARQUET-600] - Add benchmarks for RLE-Level encoding
-    * [PARQUET-592] - Support compressed writes
-    * [PARQUET-593] - Add API for writing Page statistics
-    * [PARQUET-589] - Implement Chunked InMemoryInputStream for better memory usage
-    * [PARQUET-587] - Implement BufferReader::Read(int64_t,uint8_t*)
-    * [PARQUET-616] - C++: WriteBatch should accept const arrays
-    * [PARQUET-630] - C++: Support link flags for older CMake versions
-    * [PARQUET-634] - Consistent private linking of dependencies
-    * [PARQUET-633] - Add version to WriterProperties
-    * [PARQUET-625] - Improve RLE read performance
-    * [PARQUET-737] - Use absolute namespace in macros
-    * [PARQUET-762] - C++: Use optimistic allocation instead of Arrow Builders
-    * [PARQUET-773] - C++: Check licenses with RAT in CI
-    * [PARQUET-687] - C++: Switch to PLAIN encoding if dictionary grows too large
-    * [PARQUET-784] - C++: Reference Spark, Kudu and FrameOfReference in LICENSE
-    * [PARQUET-809] - [C++] Add API to determine if two files' schemas are compatible
-    * [PARQUET-778] - Standardize the schema output to match the parquet-mr format
-    * [PARQUET-463] - Add DCHECK* macros for assertions in debug builds
-    * [PARQUET-471] - Use the same environment setup script for Travis CI as local sandbox development
-    * [PARQUET-449] - Update to latest parquet.thrift
-    * [PARQUET-496] - Fix cpplint configuration to be more restrictive
-    * [PARQUET-468] - Add a cmake option to generate the Parquet thrift headers with the thriftc in the environment
-    * [PARQUET-482] - Organize src code file structure to have a very clear folder with public headers.
-    * [PARQUET-591] - Page size estimation during writes
-    * [PARQUET-518] - Review usages of size_t and unsigned integers generally per Google style guide
-    * [PARQUET-533] - Simplify RandomAccessSource API to combine Seek/Read
-    * [PARQUET-767] - Add release scripts for parquet-cpp
-    * [PARQUET-699] - Update parquet.thrift from https://github.com/apache/parquet-format
-    * [PARQUET-653] - [C++] Re-enable -static-libstdc++ in dev artifact builds
-    * [PARQUET-763] - C++: Expose ParquetFileReader through Arrow reader
-    * [PARQUET-857] - [C++] Flatten parquet/encodings directory
-    * [PARQUET-862] - Provide default cache size values if CPU info probing is not available
-    * [PARQUET-689] - C++: Compress DataPages eagerly
-    * [PARQUET-874] - [C++] Use default memory allocator from Arrow
-    * [PARQUET-267] - Detach thirdparty code from build configuration.
-    * [PARQUET-418] - Add a utility to print contents of a Parquet file to stdout
-    * [PARQUET-519] - Disable compiler warning suppressions and fix all DEBUG build warnings
-    * [PARQUET-447] - Add Debug and Release build types and associated compiler flags
-    * [PARQUET-868] - C++: Build snappy with optimizations
-    * [PARQUET-894] - Fix compilation warning
-    * [PARQUET-883] - C++: Support non-standard gcc version strings
-    * [PARQUET-607] - Public Writer header
-    * [PARQUET-731] - [CPP] Add API to return metadata size and Skip reading values
-    * [PARQUET-628] - Link thrift privately
-    * [PARQUET-877] - C++: Update Arrow Hash, update Version in metadata.
-    * [PARQUET-547] - Refactor most templates to use DataType structs rather than the Type::type enum
-    * [PARQUET-882] - [CPP] Improve Application Version parsing
-    * [PARQUET-448] - Add cmake option to skip building the unit tests
-    * [PARQUET-721] - Performance benchmarks for reading into Arrow structures
-    * [PARQUET-820] - C++: Decoders should directly emit arrays with spacing for null entries
-    * [PARQUET-813] - C++: Build dependencies using CMake External project
-    * [PARQUET-488] - Add SSE-related cmake options to manage compiler flags
-    * [PARQUET-564] - Add option to run unit tests with valgrind --tool=memcheck
-    * [PARQUET-572] - Rename parquet_cpp namespace to parquet
-    * [PARQUET-829] - C++: Make use of ARROW-469
-    * [PARQUET-501] - Add an OutputStream abstraction (capable of memory allocation) for Encoder public API
-    * [PARQUET-744] - Clarifications on build instructions
-    * [PARQUET-520] - Add version of LocalFileSource that uses memory-mapping for zero-copy reads
-    * [PARQUET-556] - Extend RowGroupStatistics to include "min" "max" statistics
-    * [PARQUET-671] - Improve performance of RLE/bit-packed decoding in parquet-cpp
-    * [PARQUET-681] - Add tool to scan a parquet file
-
-## New Feature
-    * [PARQUET-499] - Complete PlainEncoder implementation for all primitive types and test end to end
-    * [PARQUET-439] - Conform all copyright headers to ASF requirements
-    * [PARQUET-436] - Implement ParquetFileWriter class entry point for generating new Parquet files
-    * [PARQUET-435] - Provide vectorized ColumnReader interface
-    * [PARQUET-438] - Update RLE encoder/decoder modules from Impala upstream changes and adapt unit tests
-    * [PARQUET-512] - Add optional google/benchmark 3rd-party dependency for performance testing
-    * [PARQUET-566] - Add method to retrieve the full column path
-    * [PARQUET-613] - C++: Add conda packaging recipe
-    * [PARQUET-605] - Expose schema node in ColumnDescriptor
-    * [PARQUET-619] - C++: Add OutputStream for local files
-    * [PARQUET-583] - Implement Parquet to Thrift schema conversion
-    * [PARQUET-582] - Conversion functions for Parquet enums to Thrift enums
-    * [PARQUET-728] - [C++] Bring parquet::arrow up to date with API changes in arrow::io
-    * [PARQUET-752] - [C++] Conform parquet_arrow to upstream API changes
-    * [PARQUET-788] - [C++] Reference Impala / Apache Impala (incubating) in LICENSE
-    * [PARQUET-808] - [C++] Add API to read file given externally-provided FileMetadata
-    * [PARQUET-807] - [C++] Add API to read file metadata only from a file handle
-    * [PARQUET-805] - C++: Read Int96 into Arrow Timestamp(ns)
-    * [PARQUET-836] - [C++] Add column selection to parquet::arrow::FileReader
-    * [PARQUET-835] - [C++] Add option to parquet::arrow to read columns in parallel using a thread pool
-    * [PARQUET-830] - [C++] Add additional configuration options to parquet::arrow::OpenFile
-    * [PARQUET-769] - C++: Add support for Brotli Compression
-    * [PARQUET-489] - Add visibility macros to be used for public and internal APIs of libparquet
-    * [PARQUET-542] - Support memory allocation from external memory
-    * [PARQUET-844] - [C++] Consolidate encodings, schema, and compression subdirectories into fewer files
-    * [PARQUET-848] - [C++] Consolidate libparquet_thrift subcomponent
-    * [PARQUET-646] - [C++] Enable easier 3rd-party toolchain clang builds on Linux
-    * [PARQUET-598] - [C++] Test writing all primitive data types
-    * [PARQUET-442] - Convert flat SchemaElement vector to implied nested schema data structure
-    * [PARQUET-867] - [C++] Support writing sliced Arrow arrays
-    * [PARQUET-456] - Add zlib codec support
-    * [PARQUET-834] - C++: Support r/w of arrow::ListArray
-    * [PARQUET-485] - Decouple data page delimiting from column reader / scanner classes, create test fixtures
-    * [PARQUET-434] - Add a ParquetFileReader class to encapsulate some low-level details of interacting with Parquet files
-    * [PARQUET-666] - PLAIN_DICTIONARY write support
-    * [PARQUET-437] - Incorporate googletest thirdparty dependency and add cmake tools (ADD_PARQUET_TEST) to simplify adding new unit tests
-    * [PARQUET-866] - [C++] Account for API changes in ARROW-33
-    * [PARQUET-545] - Improve API to support Decimal type
-    * [PARQUET-579] - Add API for writing Column statistics
-    * [PARQUET-494] - Implement PLAIN_DICTIONARY encoding and decoding
-    * [PARQUET-618] - C++: Automatically upload conda build artifacts on commits to master
-    * [PARQUET-833] - C++: Provide API to write spaced arrays (e.g. Arrow)
-    * [PARQUET-903] - C++: Add option to set RPATH to ORIGIN
-    * [PARQUET-451] - Add a RowGroup reader interface class
-    * [PARQUET-785] - C++: List conversion for Arrow Schemas
-    * [PARQUET-712] - C++: Read into Arrow memory
-    * [PARQUET-890] - C++: Support I/O of DATE columns in parquet_arrow
-    * [PARQUET-782] - C++: Support writing to Arrow sinks
-    * [PARQUET-849] - [C++] Upgrade default Thrift in thirdparty toolchain to 0.9.3 or 0.10
-    * [PARQUET-573] - C++: Create a public API for reading and writing file metadata
-
-## Task
-    * [PARQUET-814] - C++: Remove Conda recipes
-    * [PARQUET-503] - Re-enable parquet 2.0 encodings
-    * [PARQUET-169] - Parquet-cpp: Implement support for bulk reading and writing repetition/definition levels.
-    * [PARQUET-878] - C++: Remove setup_build_env from rc-verification script
-    * [PARQUET-881] - C++: Update Arrow hash to 0.2.0-rc2
-    * [PARQUET-771] - C++: Sync KEYS file
-    * [PARQUET-901] - C++: Publish RCs in apache-parquet-VERSION in SVN
-
-## Test
-    * [PARQUET-525] - Test coverage for malformed file failure modes on the read path
-    * [PARQUET-703] - [C++] Validate num_values metadata for columns with nulls
-    * [PARQUET-507] - Improve runtime of rle-test.cc
-    * [PARQUET-549] - Add scanner and column reader tests for dictionary data pages
-    * [PARQUET-457] - Add compressed data page unit tests
diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt
deleted file mode 100644
index 1705e85..0000000
--- a/cpp/CMakeLists.txt
+++ /dev/null
@@ -1,925 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-cmake_minimum_required(VERSION 3.5)
-message(STATUS "Building using CMake version: ${CMAKE_VERSION}")
-
-# Compiler id for Apple Clang is now AppleClang.
-# https://www.cmake.org/cmake/help/latest/policy/CMP0025.html
-cmake_policy(SET CMP0025 NEW)
-
-# Only interpret if() arguments as variables or keywords when unquoted.
-# https://www.cmake.org/cmake/help/latest/policy/CMP0054.html
-cmake_policy(SET CMP0054 NEW)
-
-# Support new if() IN_LIST operator.
-# https://www.cmake.org/cmake/help/latest/policy/CMP0057.html
-cmake_policy(SET CMP0057 NEW)
-
-# Adapted from Apache Kudu: https://github.com/apache/kudu/commit/bd549e13743a51013585
-# Honor visibility properties for all target types.
-# https://www.cmake.org/cmake/help/latest/policy/CMP0063.html
-cmake_policy(SET CMP0063 NEW)
-
-# RPATH settings on macOS do not affect install_name.
-# https://cmake.org/cmake/help/latest/policy/CMP0068.html
-if(POLICY CMP0068)
-  cmake_policy(SET CMP0068 NEW)
-endif()
-
-# find_package() uses <PackageName>_ROOT variables.
-# https://cmake.org/cmake/help/latest/policy/CMP0074.html
-if(POLICY CMP0074)
-  cmake_policy(SET CMP0074 NEW)
-endif()
-
-set(ARROW_VERSION "4.0.0-SNAPSHOT")
-
-string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" ARROW_BASE_VERSION "${ARROW_VERSION}")
-
-# If no build type is specified, default to release builds
-if(NOT DEFINED CMAKE_BUILD_TYPE)
-  set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build.")
-endif()
-string(TOLOWER ${CMAKE_BUILD_TYPE} LOWERCASE_BUILD_TYPE)
-string(TOUPPER ${CMAKE_BUILD_TYPE} UPPERCASE_BUILD_TYPE)
-
-list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake_modules")
-
-# this must be included before the project() command, because of the way
-# vcpkg (ab)uses CMAKE_TOOLCHAIN_FILE to inject its logic into CMake
-if(ARROW_DEPENDENCY_SOURCE STREQUAL "VCPKG")
-  include(Usevcpkg)
-endif()
-
-project(arrow VERSION "${ARROW_BASE_VERSION}")
-
-set(ARROW_VERSION_MAJOR "${arrow_VERSION_MAJOR}")
-set(ARROW_VERSION_MINOR "${arrow_VERSION_MINOR}")
-set(ARROW_VERSION_PATCH "${arrow_VERSION_PATCH}")
-if(ARROW_VERSION_MAJOR STREQUAL ""
-   OR ARROW_VERSION_MINOR STREQUAL ""
-   OR ARROW_VERSION_PATCH STREQUAL "")
-  message(FATAL_ERROR "Failed to determine Arrow version from '${ARROW_VERSION}'")
-endif()
-
-# The SO version is also the ABI version
-if(ARROW_VERSION_MAJOR STREQUAL "0")
-  # Arrow 0.x.y => SO version is "x", full SO version is "x.y.0"
-  set(ARROW_SO_VERSION "${ARROW_VERSION_MINOR}")
-  set(ARROW_FULL_SO_VERSION "${ARROW_SO_VERSION}.${ARROW_VERSION_PATCH}.0")
-else()
-  # Arrow 1.x.y => SO version is "10x", full SO version is "10x.y.0"
-  math(EXPR ARROW_SO_VERSION "${ARROW_VERSION_MAJOR} * 100 + ${ARROW_VERSION_MINOR}")
-  set(ARROW_FULL_SO_VERSION "${ARROW_SO_VERSION}.${ARROW_VERSION_PATCH}.0")
-endif()
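
As a worked illustration of the SO-version scheme above (an editorial addition, not part of the deleted file): for ARROW_VERSION "4.0.0-SNAPSHOT" the major version is non-zero, so the SO version is 4 * 100 + 0 = 400 and the full SO version is "400.0.0". The same arithmetic as a standalone CMake sketch:

    math(EXPR EXAMPLE_SO_VERSION "4 * 100 + 0")              # -> 400
    set(EXAMPLE_FULL_SO_VERSION "${EXAMPLE_SO_VERSION}.0.0") # -> "400.0.0"
    message(STATUS "Example SO version: ${EXAMPLE_FULL_SO_VERSION}")
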
-
-message(STATUS "Arrow version: "
-               "${ARROW_VERSION_MAJOR}.${ARROW_VERSION_MINOR}.${ARROW_VERSION_PATCH} "
-               "(full: '${ARROW_VERSION}')")
-message(STATUS "Arrow SO version: ${ARROW_SO_VERSION} (full: ${ARROW_FULL_SO_VERSION})")
-
-set(ARROW_SOURCE_DIR ${PROJECT_SOURCE_DIR})
-set(ARROW_BINARY_DIR ${PROJECT_BINARY_DIR})
-
-include(CMakePackageConfigHelpers)
-include(CMakeParseArguments)
-include(ExternalProject)
-include(FindPackageHandleStandardArgs)
-
-include(GNUInstallDirs)
-
-set(BUILD_SUPPORT_DIR "${CMAKE_SOURCE_DIR}/build-support")
-
-set(ARROW_CMAKE_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
-set(ARROW_DOC_DIR "share/doc/${PROJECT_NAME}")
-
-set(ARROW_LLVM_VERSIONS
-    "11.1"
-    "11.0"
-    "10"
-    "9"
-    "8"
-    "7")
-list(GET ARROW_LLVM_VERSIONS 0 ARROW_LLVM_VERSION_PRIMARY)
-string(REGEX
-       REPLACE "^([0-9]+)(\\..+)?" "\\1" ARROW_LLVM_VERSION_PRIMARY_MAJOR
-               "${ARROW_LLVM_VERSION_PRIMARY}")
-
-file(READ ${CMAKE_CURRENT_SOURCE_DIR}/../.env ARROW_ENV)
-string(REGEX MATCH "CLANG_TOOLS=[^\n]+" ARROW_ENV_CLANG_TOOLS_VERSION "${ARROW_ENV}")
-string(REGEX
-       REPLACE "^CLANG_TOOLS=" "" ARROW_CLANG_TOOLS_VERSION
-               "${ARROW_ENV_CLANG_TOOLS_VERSION}")
-string(REGEX
-       REPLACE "^([0-9]+)(\\..+)?" "\\1" ARROW_CLANG_TOOLS_VERSION_MAJOR
-               "${ARROW_CLANG_TOOLS_VERSION}")
-
-if(APPLE)
-  find_program(BREW_BIN brew)
-  if(BREW_BIN)
-    execute_process(COMMAND ${BREW_BIN} --prefix
-                            "llvm@${ARROW_LLVM_VERSION_PRIMARY_MAJOR}"
-                    OUTPUT_VARIABLE LLVM_BREW_PREFIX
-                    OUTPUT_STRIP_TRAILING_WHITESPACE)
-    if(NOT LLVM_BREW_PREFIX)
-      execute_process(COMMAND ${BREW_BIN} --prefix llvm
-                      OUTPUT_VARIABLE LLVM_BREW_PREFIX
-                      OUTPUT_STRIP_TRAILING_WHITESPACE)
-    endif()
-
-    execute_process(COMMAND ${BREW_BIN} --prefix "llvm@${ARROW_CLANG_TOOLS_VERSION_MAJOR}"
-                    OUTPUT_VARIABLE CLANG_TOOLS_BREW_PREFIX
-                    OUTPUT_STRIP_TRAILING_WHITESPACE)
-    if(NOT CLANG_TOOLS_BREW_PREFIX)
-      execute_process(COMMAND ${BREW_BIN} --prefix llvm
-                      OUTPUT_VARIABLE CLANG_TOOLS_BREW_PREFIX
-                      OUTPUT_STRIP_TRAILING_WHITESPACE)
-    endif()
-  endif()
-endif()
-
-if(WIN32 AND NOT MINGW)
-  # This is used to handle builds using e.g. clang in an MSVC setting.
-  set(MSVC_TOOLCHAIN TRUE)
-else()
-  set(MSVC_TOOLCHAIN FALSE)
-endif()
-
-find_package(ClangTools)
-find_package(InferTools)
-if("$ENV{CMAKE_EXPORT_COMPILE_COMMANDS}" STREQUAL "1" OR CLANG_TIDY_FOUND OR INFER_FOUND)
-  # Generate a Clang compile_commands.json "compilation database" file for use
-  # with various development tools, such as Vim's YouCompleteMe plugin.
-  # See http://clang.llvm.org/docs/JSONCompilationDatabase.html
-  set(CMAKE_EXPORT_COMPILE_COMMANDS 1)
-endif()
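
A minimal sketch (an illustration, not from the deleted file) of opting into the compilation database unconditionally, for setups where neither clang-tidy nor infer is detected:

    # Always emit compile_commands.json, regardless of the tool detection above.
    set(CMAKE_EXPORT_COMPILE_COMMANDS ON CACHE BOOL "Emit compile_commands.json")
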
-
-# ----------------------------------------------------------------------
-# cmake options
-include(DefineOptions)
-
-# Needed for linting targets, etc.
-if(${CMAKE_VERSION} VERSION_LESS "3.12.0")
-  find_package(PythonInterp)
-else()
-  # Use the first Python installation on PATH, not the newest one
-  set(Python3_FIND_STRATEGY "LOCATION")
-  # On Windows, use registry last, not first
-  set(Python3_FIND_REGISTRY "LAST")
-  # On macOS, use framework last, not first
-  set(Python3_FIND_FRAMEWORK "LAST")
-
-  find_package(Python3)
-  set(PYTHON_EXECUTABLE ${Python3_EXECUTABLE})
-endif()
-
-if(ARROW_USE_CCACHE)
-  find_program(CCACHE_FOUND ccache)
-  if(CCACHE_FOUND)
-    message(STATUS "Using ccache: ${CCACHE_FOUND}")
-    set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND})
-    set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND})
-    # ARROW-3985: let ccache preserve C++ comments, because some of them may be
-    # meaningful to the compiler
-    set(ENV{CCACHE_COMMENTS} "1")
-  endif(CCACHE_FOUND)
-endif()
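
For comparison, a hedged sketch of the per-language launcher approach available in CMake 3.4 and later, an alternative to the global RULE_LAUNCH_* properties used above; it assumes ccache is on PATH:

    find_program(CCACHE_PROGRAM ccache)
    if(CCACHE_PROGRAM)
      # Wrap C and C++ compilations only; link steps are left untouched.
      set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PROGRAM}")
      set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PROGRAM}")
    endif()
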
-
-if(ARROW_USE_PRECOMPILED_HEADERS AND ${CMAKE_VERSION} VERSION_LESS "3.16.0")
-  message(WARNING "Precompiled headers need CMake 3.16.0 or later, disabling")
-  set(ARROW_USE_PRECOMPILED_HEADERS OFF)
-endif()
-
-if(ARROW_OPTIONAL_INSTALL)
-  # Don't make the "install" target depend on the "all" target
-  set(CMAKE_SKIP_INSTALL_ALL_DEPENDENCY true)
-
-  set(INSTALL_IS_OPTIONAL OPTIONAL)
-endif()
-
-#
-# "make lint" target
-#
-if(NOT ARROW_VERBOSE_LINT)
-  set(ARROW_LINT_QUIET "--quiet")
-endif()
-
-if(NOT LINT_EXCLUSIONS_FILE)
-  # source files matching a glob from a line in this file
-  # will be excluded from linting (cpplint, clang-tidy, clang-format)
-  set(LINT_EXCLUSIONS_FILE ${BUILD_SUPPORT_DIR}/lint_exclusions.txt)
-endif()
-
-find_program(CPPLINT_BIN NAMES cpplint cpplint.py HINTS ${BUILD_SUPPORT_DIR})
-message(STATUS "Found cpplint executable at ${CPPLINT_BIN}")
-
-add_custom_target(lint
-                  ${PYTHON_EXECUTABLE}
-                  ${BUILD_SUPPORT_DIR}/run_cpplint.py
-                  --cpplint_binary
-                  ${CPPLINT_BIN}
-                  --exclude_globs
-                  ${LINT_EXCLUSIONS_FILE}
-                  --source_dir
-                  ${CMAKE_CURRENT_SOURCE_DIR}/src
-                  ${ARROW_LINT_QUIET})
-
-#
-# "make format" and "make check-format" targets
-#
-if(${CLANG_FORMAT_FOUND})
-  # runs clang format and updates files in place.
-  add_custom_target(format
-                    ${PYTHON_EXECUTABLE}
-                    ${BUILD_SUPPORT_DIR}/run_clang_format.py
-                    --clang_format_binary
-                    ${CLANG_FORMAT_BIN}
-                    --exclude_globs
-                    ${LINT_EXCLUSIONS_FILE}
-                    --source_dir
-                    ${CMAKE_CURRENT_SOURCE_DIR}/src
-                    --fix
-                    ${ARROW_LINT_QUIET})
-
-  # runs clang format and exits with a non-zero exit code if any files need to be reformatted
-  add_custom_target(check-format
-                    ${PYTHON_EXECUTABLE}
-                    ${BUILD_SUPPORT_DIR}/run_clang_format.py
-                    --clang_format_binary
-                    ${CLANG_FORMAT_BIN}
-                    --exclude_globs
-                    ${LINT_EXCLUSIONS_FILE}
-                    --source_dir
-                    ${CMAKE_CURRENT_SOURCE_DIR}/src
-                    ${ARROW_LINT_QUIET})
-endif()
-
-add_custom_target(lint_cpp_cli ${PYTHON_EXECUTABLE} ${BUILD_SUPPORT_DIR}/lint_cpp_cli.py
-                  ${CMAKE_CURRENT_SOURCE_DIR}/src)
-
-if(ARROW_LINT_ONLY)
-  message("ARROW_LINT_ONLY was specified, this is only a partial build directory")
-  return()
-endif()
-
-#
-# "make clang-tidy" and "make check-clang-tidy" targets
-#
-if(${CLANG_TIDY_FOUND})
-  # TODO check to make sure .clang-tidy is being respected
-
-  # runs clang-tidy and attempts to fix any warning automatically
-  add_custom_target(clang-tidy
-                    ${PYTHON_EXECUTABLE}
-                    ${BUILD_SUPPORT_DIR}/run_clang_tidy.py
-                    --clang_tidy_binary
-                    ${CLANG_TIDY_BIN}
-                    --exclude_globs
-                    ${LINT_EXCLUSIONS_FILE}
-                    --compile_commands
-                    ${CMAKE_BINARY_DIR}/compile_commands.json
-                    --source_dir
-                    ${CMAKE_CURRENT_SOURCE_DIR}/src
-                    --fix
-                    ${ARROW_LINT_QUIET})
-
-  # runs clang-tidy and exits with a non-zero exit code if any errors are found.
-  add_custom_target(check-clang-tidy
-                    ${PYTHON_EXECUTABLE}
-                    ${BUILD_SUPPORT_DIR}/run_clang_tidy.py
-                    --clang_tidy_binary
-                    ${CLANG_TIDY_BIN}
-                    --exclude_globs
-                    ${LINT_EXCLUSIONS_FILE}
-                    --compile_commands
-                    ${CMAKE_BINARY_DIR}/compile_commands.json
-                    --source_dir
-                    ${CMAKE_CURRENT_SOURCE_DIR}/src
-                    ${ARROW_LINT_QUIET})
-endif()
-
-if(UNIX)
-  add_custom_target(iwyu ${BUILD_SUPPORT_DIR}/iwyu/iwyu.sh)
-  add_custom_target(iwyu-all ${BUILD_SUPPORT_DIR}/iwyu/iwyu.sh all)
-endif(UNIX)
-
-#
-# Set up various options
-#
-
-if(ARROW_BUILD_BENCHMARKS
-   OR ARROW_BUILD_TESTS
-   OR ARROW_BUILD_INTEGRATION
-   OR ARROW_FUZZING)
-  set(ARROW_JSON ON)
-  set(ARROW_TESTING ON)
-endif()
-
-if(ARROW_GANDIVA)
-  set(ARROW_WITH_RE2 ON)
-endif()
-
-if(ARROW_CUDA
-   OR ARROW_FLIGHT
-   OR ARROW_PARQUET
-   OR ARROW_BUILD_TESTS
-   OR ARROW_BUILD_BENCHMARKS)
-  set(ARROW_IPC ON)
-endif()
-
-if(ARROW_DATASET)
-  set(ARROW_COMPUTE ON)
-  set(ARROW_FILESYSTEM ON)
-endif()
-
-if(ARROW_PARQUET)
-  set(ARROW_COMPUTE ON)
-endif()
-
-if(ARROW_PYTHON)
-  set(ARROW_COMPUTE ON)
-  set(ARROW_CSV ON)
-  set(ARROW_DATASET ON)
-  set(ARROW_FILESYSTEM ON)
-  set(ARROW_HDFS ON)
-  set(ARROW_JSON ON)
-endif()
-
-if(MSVC_TOOLCHAIN)
-  # ORC doesn't build on windows
-  set(ARROW_ORC OFF)
-  # Plasma using glog is not fully tested on windows.
-  set(ARROW_USE_GLOG OFF)
-endif()
-
-if(ARROW_JNI)
-  set(ARROW_BUILD_STATIC ON)
-endif()
-
-if(ARROW_ORC)
-  set(ARROW_WITH_LZ4 ON)
-  set(ARROW_WITH_SNAPPY ON)
-  set(ARROW_WITH_ZLIB ON)
-  set(ARROW_WITH_ZSTD ON)
-endif()
-
-# datetime code used by iOS requires zlib support
-if(IOS)
-  set(ARROW_WITH_ZLIB ON)
-endif()
-
-if(NOT ARROW_BUILD_TESTS)
-  set(NO_TESTS 1)
-else()
-  add_custom_target(all-tests)
-  add_custom_target(unittest
-                    ctest
-                    -j4
-                    -L
-                    unittest
-                    --output-on-failure)
-  add_dependencies(unittest all-tests)
-endif()
-
-if(ARROW_ENABLE_TIMING_TESTS)
-  add_definitions(-DARROW_WITH_TIMING_TESTS)
-endif()
-
-if(NOT ARROW_BUILD_BENCHMARKS)
-  set(NO_BENCHMARKS 1)
-else()
-  add_custom_target(all-benchmarks)
-  add_custom_target(benchmark ctest -L benchmark)
-  add_dependencies(benchmark all-benchmarks)
-  if(ARROW_BUILD_BENCHMARKS_REFERENCE)
-    add_definitions(-DARROW_WITH_BENCHMARKS_REFERENCE)
-  endif()
-endif()
-
-if(NOT ARROW_BUILD_EXAMPLES)
-  set(NO_EXAMPLES 1)
-endif()
-
-if(NOT ARROW_FUZZING)
-  set(NO_FUZZING 1)
-endif()
-
-if(ARROW_LARGE_MEMORY_TESTS)
-  add_definitions(-DARROW_LARGE_MEMORY_TESTS)
-endif()
-
-if(ARROW_TEST_MEMCHECK)
-  add_definitions(-DARROW_VALGRIND)
-endif()
-
-if(ARROW_USE_UBSAN)
-  add_definitions(-DARROW_UBSAN)
-endif()
-
-#
-# Compiler flags
-#
-
-if(ARROW_NO_DEPRECATED_API)
-  add_definitions(-DARROW_NO_DEPRECATED_API)
-endif()
-
-if(ARROW_EXTRA_ERROR_CONTEXT)
-  add_definitions(-DARROW_EXTRA_ERROR_CONTEXT)
-endif()
-
-include(SetupCxxFlags)
-
-#
-# Build output directory
-#
-
-# set compile output directory
-string(TOLOWER ${CMAKE_BUILD_TYPE} BUILD_SUBDIR_NAME)
-
-# If building in-source, create the 'latest' symlink. If building out-of-source,
-# which is preferred, simply output the binaries in the build folder
-if(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_CURRENT_BINARY_DIR})
-  set(BUILD_OUTPUT_ROOT_DIRECTORY
-      "${CMAKE_CURRENT_BINARY_DIR}/build/${BUILD_SUBDIR_NAME}/")
-  # Link build/latest to the current build directory, to avoid developers
-  # accidentally running the latest debug build when in fact they're building
-  # release builds.
-  file(MAKE_DIRECTORY ${BUILD_OUTPUT_ROOT_DIRECTORY})
-  if(NOT APPLE)
-    set(MORE_ARGS "-T")
-  endif()
-  execute_process(COMMAND ln
-                          ${MORE_ARGS}
-                          -sf
-                          ${BUILD_OUTPUT_ROOT_DIRECTORY}
-                          ${CMAKE_CURRENT_BINARY_DIR}/build/latest)
-else()
-  set(BUILD_OUTPUT_ROOT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${BUILD_SUBDIR_NAME}/")
-endif()
-
-# where to put generated archives (.a files)
-set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-set(ARCHIVE_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-# where to put generated libraries (.so files)
-set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-set(LIBRARY_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-# where to put generated binaries
-set(EXECUTABLE_OUTPUT_PATH "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-if(CMAKE_GENERATOR STREQUAL Xcode)
-  # Xcode projects support multi-configuration builds.  This forces a single output directory
-  # when building with Xcode that is consistent with a single-configuration Makefile-driven build.
-  set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY_${UPPERCASE_BUILD_TYPE}
-      "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-  set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_${UPPERCASE_BUILD_TYPE}
-      "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-  set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_${UPPERCASE_BUILD_TYPE}
-      "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-endif()
-
-#
-# Dependencies
-#
-
-include(BuildUtils)
-enable_testing()
-
-include(ThirdpartyToolchain)
-
-# Add common flags
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${CXX_COMMON_FLAGS}")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${ARROW_CXXFLAGS}")
-
-# For any C code, use the same flags. These flags don't contain
-# C++ specific flags.
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${CXX_COMMON_FLAGS} ${ARROW_CXXFLAGS}")
-
-# Remove -std=c++11 to avoid errors from C compilers
-string(REPLACE "-std=c++11" "" CMAKE_C_FLAGS ${CMAKE_C_FLAGS})
-
-# Add C++-only flags, like -std=c++11
-set(CMAKE_CXX_FLAGS "${CXX_ONLY_FLAGS} ${CMAKE_CXX_FLAGS}")
-
-# ASAN / TSAN / UBSAN
-if(ARROW_FUZZING)
-  set(ARROW_USE_COVERAGE ON)
-endif()
-include(san-config)
-
-# Code coverage
-if("${ARROW_GENERATE_COVERAGE}")
-  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --coverage -DCOVERAGE_BUILD")
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage -DCOVERAGE_BUILD")
-endif()
-
-# CMAKE_CXX_FLAGS now fully assembled
-message(STATUS "CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}")
-message(STATUS "CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}")
-
-include_directories(${CMAKE_CURRENT_BINARY_DIR}/src)
-include_directories(src)
-
-# Compiled flatbuffers files
-include_directories(src/generated)
-
-#
-# Visibility
-#
-if(PARQUET_BUILD_SHARED)
-  set_target_properties(arrow_shared
-                        PROPERTIES C_VISIBILITY_PRESET
-                                   hidden
-                                   CXX_VISIBILITY_PRESET
-                                   hidden
-                                   VISIBILITY_INLINES_HIDDEN
-                                   1)
-endif()
-
-#
-# "make ctags" target
-#
-if(UNIX)
-  add_custom_target(ctags ctags -R --languages=c++,c)
-endif(UNIX)
-
-#
-# "make etags" target
-#
-if(UNIX)
-  add_custom_target(tags
-                    etags
-                    --members
-                    --declarations
-                    `find
-                    ${CMAKE_CURRENT_SOURCE_DIR}/src
-                    -name
-                    \\*.cc
-                    -or
-                    -name
-                    \\*.hh
-                    -or
-                    -name
-                    \\*.cpp
-                    -or
-                    -name
-                    \\*.h
-                    -or
-                    -name
-                    \\*.c
-                    -or
-                    -name
-                    \\*.f`)
-  add_custom_target(etags DEPENDS tags)
-endif(UNIX)
-
-#
-# "make cscope" target
-#
-if(UNIX)
-  add_custom_target(cscope find ${CMAKE_CURRENT_SOURCE_DIR}
-                    (-name
-                     \\*.cc
-                     -or
-                     -name
-                     \\*.hh
-                     -or
-                     -name
-                     \\*.cpp
-                     -or
-                     -name
-                     \\*.h
-                     -or
-                     -name
-                     \\*.c
-                     -or
-                     -name
-                     \\*.f)
-                    -exec
-                    echo
-                    \"{}\"
-                    \;
-                    >
-                    cscope.files
-                    &&
-                    cscope
-                    -q
-                    -b
-                    VERBATIM)
-endif(UNIX)
-
-#
-# "make infer" target
-#
-
-if(${INFER_FOUND})
-  # runs infer capture
-  add_custom_target(infer
-                    ${BUILD_SUPPORT_DIR}/run-infer.sh
-                    ${INFER_BIN}
-                    ${CMAKE_BINARY_DIR}/compile_commands.json
-                    1)
-  # runs infer analyze
-  add_custom_target(infer-analyze
-                    ${BUILD_SUPPORT_DIR}/run-infer.sh
-                    ${INFER_BIN}
-                    ${CMAKE_BINARY_DIR}/compile_commands.json
-                    2)
-  # runs infer report
-  add_custom_target(infer-report
-                    ${BUILD_SUPPORT_DIR}/run-infer.sh
-                    ${INFER_BIN}
-                    ${CMAKE_BINARY_DIR}/compile_commands.json
-                    3)
-endif()
-
-#
-# Linker and Dependencies
-#
-
-# Libraries to link statically with libarrow.so
-set(ARROW_LINK_LIBS)
-set(ARROW_STATIC_LINK_LIBS)
-set(ARROW_STATIC_INSTALL_INTERFACE_LIBS)
-
-if(ARROW_USE_OPENSSL)
-  set(ARROW_OPENSSL_LIBS OpenSSL::Crypto OpenSSL::SSL)
-  list(APPEND ARROW_LINK_LIBS ${ARROW_OPENSSL_LIBS})
-  list(APPEND ARROW_STATIC_LINK_LIBS ${ARROW_OPENSSL_LIBS})
-  list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS ${ARROW_OPENSSL_LIBS})
-endif()
-
-if(ARROW_WITH_BROTLI)
-  # Order is important for static linking
-  set(ARROW_BROTLI_LIBS Brotli::brotlienc Brotli::brotlidec Brotli::brotlicommon)
-  list(APPEND ARROW_LINK_LIBS ${ARROW_BROTLI_LIBS})
-  list(APPEND ARROW_STATIC_LINK_LIBS ${ARROW_BROTLI_LIBS})
-  if(Brotli_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS ${ARROW_BROTLI_LIBS})
-  endif()
-endif()
-
-if(ARROW_WITH_BZ2)
-  list(APPEND ARROW_STATIC_LINK_LIBS BZip2::BZip2)
-  if(BZip2_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS BZip2::BZip2)
-  endif()
-endif()
-
-if(ARROW_WITH_LZ4)
-  list(APPEND ARROW_STATIC_LINK_LIBS LZ4::lz4)
-  if(Lz4_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS LZ4::lz4)
-  endif()
-endif()
-
-if(ARROW_WITH_SNAPPY)
-  list(APPEND ARROW_STATIC_LINK_LIBS Snappy::snappy)
-  if(Snappy_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS Snappy::snappy)
-  endif()
-endif()
-
-if(ARROW_WITH_ZLIB)
-  list(APPEND ARROW_STATIC_LINK_LIBS ZLIB::ZLIB)
-  if(ZLIB_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS ZLIB::ZLIB)
-  endif()
-endif()
-
-if(ARROW_WITH_ZSTD)
-  list(APPEND ARROW_STATIC_LINK_LIBS ${ARROW_ZSTD_LIBZSTD})
-  if(zstd_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS ${ARROW_ZSTD_LIBZSTD})
-  endif()
-endif()
-
-if(ARROW_ORC)
-  list(APPEND ARROW_LINK_LIBS orc::liborc ${ARROW_PROTOBUF_LIBPROTOBUF})
-  list(APPEND ARROW_STATIC_LINK_LIBS orc::liborc ${ARROW_PROTOBUF_LIBPROTOBUF})
-  if(ORC_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS orc::liborc
-                ${ARROW_PROTOBUF_LIBPROTOBUF})
-  endif()
-endif()
-
-if(ARROW_USE_GLOG)
-  list(APPEND ARROW_LINK_LIBS glog::glog)
-  list(APPEND ARROW_STATIC_LINK_LIBS glog::glog)
-  if(GLOG_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS glog::glog)
-  endif()
-  add_definitions("-DARROW_USE_GLOG")
-endif()
-
-if(ARROW_S3)
-  list(APPEND ARROW_LINK_LIBS ${AWSSDK_LINK_LIBRARIES})
-  list(APPEND ARROW_STATIC_LINK_LIBS ${AWSSDK_LINK_LIBRARIES})
-endif()
-
-if(ARROW_WITH_UTF8PROC)
-  list(APPEND ARROW_LINK_LIBS utf8proc::utf8proc)
-  list(APPEND ARROW_STATIC_LINK_LIBS utf8proc::utf8proc)
-  if(utf8proc_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS utf8proc::utf8proc)
-  endif()
-endif()
-
-if(ARROW_WITH_RE2)
-  list(APPEND ARROW_LINK_LIBS re2::re2)
-  list(APPEND ARROW_STATIC_LINK_LIBS re2::re2)
-  if(re2_SOURCE STREQUAL "SYSTEM")
-    list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS re2::re2)
-  endif()
-endif()
-
-add_custom_target(arrow_dependencies)
-add_custom_target(arrow_benchmark_dependencies)
-add_custom_target(arrow_test_dependencies)
-
-# ARROW-4581: CMake can be finicky about invoking the ExternalProject builds
-# for some of the library dependencies, so we "nuke it from orbit" by making
-# the toolchain dependency explicit using these "dependencies" targets
-add_dependencies(arrow_dependencies toolchain)
-add_dependencies(arrow_test_dependencies toolchain-tests)
-
-if(ARROW_STATIC_LINK_LIBS)
-  add_dependencies(arrow_dependencies ${ARROW_STATIC_LINK_LIBS})
-  if(ARROW_ORC)
-    if(NOT MSVC_TOOLCHAIN)
-      list(APPEND ARROW_STATIC_LINK_LIBS ${CMAKE_DL_LIBS})
-      list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS ${CMAKE_DL_LIBS})
-    endif()
-  endif()
-endif()
-
-set(ARROW_SHARED_PRIVATE_LINK_LIBS ${ARROW_STATIC_LINK_LIBS})
-
-# boost::filesystem is needed for S3 and Flight tests as a boost::process dependency.
-if(((ARROW_FLIGHT OR ARROW_S3) AND (ARROW_BUILD_TESTS OR ARROW_BUILD_INTEGRATION)))
-  list(APPEND ARROW_TEST_LINK_LIBS ${BOOST_FILESYSTEM_LIBRARY} ${BOOST_SYSTEM_LIBRARY})
-endif()
-
-if(NOT MSVC_TOOLCHAIN)
-  list(APPEND ARROW_LINK_LIBS ${CMAKE_DL_LIBS})
-  list(APPEND ARROW_SHARED_INSTALL_INTERFACE_LIBS ${CMAKE_DL_LIBS})
-endif()
-
-set(ARROW_TEST_LINK_TOOLCHAIN
-    GTest::gtest_main
-    GTest::gtest
-    GTest::gmock
-    ${BOOST_FILESYSTEM_LIBRARY}
-    ${BOOST_SYSTEM_LIBRARY})
-
-if(ARROW_BUILD_TESTS)
-  add_dependencies(arrow_test_dependencies ${ARROW_TEST_LINK_TOOLCHAIN})
-endif()
-
-if(ARROW_BUILD_BENCHMARKS)
-  # Some benchmarks use gtest
-  add_dependencies(arrow_benchmark_dependencies arrow_test_dependencies
-                   toolchain-benchmarks)
-endif()
-
-set(ARROW_TEST_STATIC_LINK_LIBS arrow_testing_static arrow_static ${ARROW_LINK_LIBS}
-                                ${ARROW_TEST_LINK_TOOLCHAIN})
-
-set(ARROW_TEST_SHARED_LINK_LIBS arrow_testing_shared arrow_shared ${ARROW_LINK_LIBS}
-                                ${ARROW_TEST_LINK_TOOLCHAIN})
-
-if(NOT MSVC)
-  set(ARROW_TEST_SHARED_LINK_LIBS ${ARROW_TEST_SHARED_LINK_LIBS} ${CMAKE_DL_LIBS})
-endif()
-
-if("${ARROW_TEST_LINKAGE}" STREQUAL "shared")
-  if(ARROW_BUILD_TESTS AND NOT ARROW_BUILD_SHARED)
-    message(FATAL_ERROR "If using shared linkage for unit tests, must also \
-pass ARROW_BUILD_SHARED=on")
-  endif()
-  # Use shared linking for unit tests if it's available
-  set(ARROW_TEST_LINK_LIBS ${ARROW_TEST_SHARED_LINK_LIBS})
-  set(ARROW_EXAMPLE_LINK_LIBS arrow_shared)
-else()
-  if(ARROW_BUILD_TESTS AND NOT ARROW_BUILD_STATIC)
-    message(FATAL_ERROR "If using static linkage for unit tests, must also \
-pass ARROW_BUILD_STATIC=on")
-  endif()
-  set(ARROW_TEST_LINK_LIBS ${ARROW_TEST_STATIC_LINK_LIBS})
-  set(ARROW_EXAMPLE_LINK_LIBS arrow_static)
-endif()
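
For context, a minimal downstream sketch (hypothetical target and file names) of consuming the shared library target selected above, assuming an installed Arrow that exports its CMake package config:

    find_package(Arrow REQUIRED)
    add_executable(example_reader example_reader.cc)
    # arrow_shared is the same target ARROW_EXAMPLE_LINK_LIBS points at under
    # shared linkage; use arrow_static for the static variant.
    target_link_libraries(example_reader PRIVATE arrow_shared)
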
-
-if(ARROW_BUILD_BENCHMARKS)
-  # In the case that benchmark::benchmark_main is not available,
-  # we need to provide our own version. This only happens for older versions
-  # of benchmark.
-  if(NOT TARGET benchmark::benchmark_main)
-    add_library(arrow_benchmark_main STATIC src/arrow/util/benchmark_main.cc)
-    add_library(benchmark::benchmark_main ALIAS arrow_benchmark_main)
-  endif()
-
-  set(ARROW_BENCHMARK_LINK_LIBS benchmark::benchmark_main benchmark::benchmark
-                                ${ARROW_TEST_LINK_LIBS})
-  if(WIN32)
-    set(ARROW_BENCHMARK_LINK_LIBS Shlwapi.dll ${ARROW_BENCHMARK_LINK_LIBS})
-  endif()
-endif()
-
-if(ARROW_JEMALLOC)
-  add_definitions(-DARROW_JEMALLOC)
-  add_definitions(-DARROW_JEMALLOC_INCLUDE_DIR=${JEMALLOC_INCLUDE_DIR})
-  list(APPEND ARROW_LINK_LIBS jemalloc::jemalloc)
-  list(APPEND ARROW_STATIC_LINK_LIBS jemalloc::jemalloc)
-endif()
-
-if(ARROW_MIMALLOC)
-  add_definitions(-DARROW_MIMALLOC)
-  list(APPEND ARROW_LINK_LIBS mimalloc::mimalloc)
-  list(APPEND ARROW_STATIC_LINK_LIBS mimalloc::mimalloc)
-endif()
-
-# ----------------------------------------------------------------------
-# Handle platform-related libraries like -pthread
-
-set(ARROW_SYSTEM_LINK_LIBS)
-
-if(THREADS_FOUND)
-  list(APPEND ARROW_SYSTEM_LINK_LIBS Threads::Threads)
-endif()
-
-if(WIN32)
-  # Winsock
-  list(APPEND ARROW_SYSTEM_LINK_LIBS "ws2_32.dll")
-endif()
-
-if(NOT WIN32 AND NOT APPLE)
-  # Pass -lrt on Linux only
-  list(APPEND ARROW_SYSTEM_LINK_LIBS rt)
-endif()
-
-list(APPEND ARROW_LINK_LIBS ${ARROW_SYSTEM_LINK_LIBS})
-list(APPEND ARROW_STATIC_LINK_LIBS ${ARROW_SYSTEM_LINK_LIBS})
-list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS ${ARROW_SYSTEM_LINK_LIBS})
-
-#
-# Subdirectories
-#
-
-if(NOT WIN32 AND ARROW_PLASMA)
-  add_subdirectory(src/plasma)
-endif()
-
-add_subdirectory(src/arrow)
-
-if(ARROW_PARQUET)
-  add_subdirectory(src/parquet)
-  add_subdirectory(tools/parquet)
-  if(PARQUET_BUILD_EXAMPLES)
-    add_subdirectory(examples/parquet)
-  endif()
-endif()
-
-if(ARROW_JNI)
-  add_subdirectory(src/jni)
-endif()
-
-if(ARROW_GANDIVA)
-  add_subdirectory(src/gandiva)
-endif()
-
-if(ARROW_BUILD_EXAMPLES)
-  add_custom_target(runexample ctest -L example)
-  add_subdirectory(examples/arrow)
-endif()
-
-install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/../LICENSE.txt
-              ${CMAKE_CURRENT_SOURCE_DIR}/../NOTICE.txt
-              ${CMAKE_CURRENT_SOURCE_DIR}/README.md
-        DESTINATION "${ARROW_DOC_DIR}")
-
-#
-# Validate and print out Arrow configuration options
-#
-
-validate_config()
-config_summary_message()
-if(${ARROW_BUILD_CONFIG_SUMMARY_JSON})
-  config_summary_json()
-endif()
diff --git a/cpp/CMakeSettings.json b/cpp/CMakeSettings.json
deleted file mode 100644
index 90d3abb..0000000
--- a/cpp/CMakeSettings.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "configurations": [
-  {
-    "name": "x64-Debug (default)",
-    "generator": "Ninja",
-    "configurationType": "Debug",
-    "inheritEnvironments": [ "msvc_x64_x64" ],
-    "buildRoot": "${projectDir}\\out\\build\\${name}",
-    "installRoot": "${projectDir}\\out\\install\\${name}",
-    "cmakeCommandArgs": "",
-    "buildCommandArgs": "",
-    "ctestCommandArgs": "",
-    "variables": [
-        {
-          "name":"VCPKG_MANIFEST_MODE",
-          "value":"OFF"
-        }
-      ]
-    }
-  ]
-}
diff --git a/cpp/README.md b/cpp/README.md
deleted file mode 100644
index b083f3f..0000000
--- a/cpp/README.md
+++ /dev/null
@@ -1,34 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Apache Arrow C++
-
-This directory contains the code and build system for the Arrow C++ libraries,
-as well as for the C++ libraries for Apache Parquet.
-
-## Installation
-
-See https://arrow.apache.org/install/ for the latest instructions on how
-to install pre-compiled binary versions of the library.
-
-## Source Builds and Development
-
-Please refer to our latest [C++ Development Documentation][1].
-
-[1]: https://github.com/apache/arrow/blob/master/docs/source/developers/cpp
diff --git a/cpp/apidoc/.gitignore b/cpp/apidoc/.gitignore
deleted file mode 100644
index 5ccff1a..0000000
--- a/cpp/apidoc/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-html/
diff --git a/cpp/apidoc/Doxyfile b/cpp/apidoc/Doxyfile
deleted file mode 100644
index d8b0928..0000000
--- a/cpp/apidoc/Doxyfile
+++ /dev/null
@@ -1,2551 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Doxyfile 1.8.18
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project.
-#
-# All text after a double hash (##) is considered a comment and is placed in
-# front of the TAG it is preceding.
-#
-# All text after a single hash (#) is considered a comment and will be ignored.
-# The format is:
-# TAG = value [value, ...]
-# For lists, items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (\" \").
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the configuration
-# file that follow. The default is UTF-8 which is also the encoding used for all
-# text before the first occurrence of this tag. Doxygen uses libiconv (or the
-# iconv built into libc) for the transcoding. See
-# https://www.gnu.org/software/libiconv/ for the list of possible encodings.
-# The default value is: UTF-8.
-
-DOXYFILE_ENCODING      = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
-# double-quotes, unless you are using Doxywizard) that should identify the
-# project for which the documentation is generated. This name is used in the
-# title of most generated pages and in a few other places.
-# The default value is: My Project.
-
-PROJECT_NAME           = "Apache Arrow (C++)"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
-# could be handy for archiving the generated documentation or if some version
-# control system is used.
-
-PROJECT_NUMBER         =
-
-# Using the PROJECT_BRIEF tag one can provide an optional one line description
-# for a project that appears at the top of each page and should give the viewer a
-# quick idea about the purpose of the project. Keep the description short.
-
-PROJECT_BRIEF          = "A columnar in-memory analytics layer designed to accelerate big data."
-
-# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
-# in the documentation. The maximum height of the logo should not exceed 55
-# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
-# the logo to the output directory.
-
-PROJECT_LOGO           =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
-# into which the generated documentation will be written. If a relative path is
-# entered, it will be relative to the location where doxygen was started. If
-# left blank the current directory will be used.
-
-OUTPUT_DIRECTORY       = $(OUTPUT_DIRECTORY)
-
-# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
-# directories (in 2 levels) under the output directory of each output format and
-# will distribute the generated files over these directories. Enabling this
-# option can be useful when feeding doxygen a huge amount of source files, where
-# putting all generated files in the same directory would otherwise cause
-# performance problems for the file system.
-# The default value is: NO.
-
-CREATE_SUBDIRS         = NO
-
-# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
-# characters to appear in the names of generated files. If set to NO, non-ASCII
-# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
-# U+3044.
-# The default value is: NO.
-
-ALLOW_UNICODE_NAMES    = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
-# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
-# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
-# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
-# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
-# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
-# Ukrainian and Vietnamese.
-# The default value is: English.
-
-OUTPUT_LANGUAGE        = English
-
-# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all generated output in the proper direction.
-# Possible values are: None, LTR, RTL and Context.
-# The default value is: None.
-
-OUTPUT_TEXT_DIRECTION  = None
-
-# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
-# descriptions after the members that are listed in the file and class
-# documentation (similar to Javadoc). Set to NO to disable this.
-# The default value is: YES.
-
-BRIEF_MEMBER_DESC      = YES
-
-# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
-# description of a member or function before the detailed description
-#
-# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-# The default value is: YES.
-
-REPEAT_BRIEF           = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator that is
-# used to form the text in various listings. Each string in this list, if found
-# as the leading text of the brief description, will be stripped from the text
-# and the result, after processing the whole list, is used as the annotated
-# text. Otherwise, the brief description is used as-is. If left blank, the
-# following values are used ($name is automatically replaced with the name of
-# the entity): The $name class, The $name widget, The $name file, is, provides,
-# specifies, contains, represents, a, an and the.
-
-ABBREVIATE_BRIEF       = "The $name class" \
-                         "The $name widget" \
-                         "The $name file" \
-                         is \
-                         provides \
-                         specifies \
-                         contains \
-                         represents \
-                         a \
-                         an \
-                         the
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# doxygen will generate a detailed section even if there is only a brief
-# description.
-# The default value is: NO.
-
-ALWAYS_DETAILED_SEC    = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-# The default value is: NO.
-
-INLINE_INHERITED_MEMB  = NO
-
-# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
-# before file names in the file list and in the header files. If set to NO the
-# shortest path that makes the file name unique will be used
-# The default value is: YES.
-
-FULL_PATH_NAMES        = YES
-
-# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
-# Stripping is only done if one of the specified strings matches the left-hand
-# part of the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the path to
-# strip.
-#
-# Note that you can specify absolute paths here, but also relative paths, which
-# will be relative from the directory where doxygen is started.
-# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-
-STRIP_FROM_PATH        =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
-# path mentioned in the documentation of a class, which tells the reader which
-# header file to include in order to use a class. If left blank only the name of
-# the header file containing the class definition is used. Otherwise one should
-# specify the list of include paths that are normally passed to the compiler
-# using the -I flag.
-
-STRIP_FROM_INC_PATH    = ../src
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
-# less readable) file names. This can be useful if your file system doesn't
-# support long names, as on DOS, Mac, or CD-ROM.
-# The default value is: NO.
-
-SHORT_NAMES            = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
-# first line (until the first dot) of a Javadoc-style comment as the brief
-# description. If set to NO, the Javadoc-style will behave just like regular Qt-
-# style comments (thus requiring an explicit @brief command for a brief
-# description.)
-# The default value is: NO.
-
-JAVADOC_AUTOBRIEF      = YES
-
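As an illustration of the setting above: with JAVADOC_AUTOBRIEF = YES the first
sentence of a ///-style comment becomes the brief description, with no explicit
@brief command needed. A small hypothetical example:

    #include <cstdint>

    class BatchStub {  // hypothetical class, for illustration only
     public:
      /// Returns the number of valid rows in the batch. With JAVADOC_AUTOBRIEF
      /// set to YES this first sentence (up to the first dot) is taken as the
      /// brief description; the remainder becomes the detailed description.
      int64_t num_rows() const { return 0; }
    };
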
-# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line
-# such as
-# /***************
-# as being the beginning of a Javadoc-style comment "banner". If set to NO, the
-# Javadoc-style will behave just like regular comments and it will not be
-# interpreted by doxygen.
-# The default value is: NO.
-
-JAVADOC_BANNER         = NO
-
-# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
-# line (until the first dot) of a Qt-style comment as the brief description. If
-# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
-# requiring an explicit \brief command for a brief description.)
-# The default value is: NO.
-
-QT_AUTOBRIEF           = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
-# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
-# a brief description. This used to be the default behavior. The new default is
-# to treat a multi-line C++ comment block as a detailed description. Set this
-# tag to YES if you prefer the old behavior instead.
-#
-# Note that setting this tag to YES also means that Rational Rose comments are
-# not recognized any more.
-# The default value is: NO.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
-# documentation from any documented member that it re-implements.
-# The default value is: YES.
-
-INHERIT_DOCS           = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
-# page for each member. If set to NO, the documentation of a member will be part
-# of the file/class/namespace that contains it.
-# The default value is: NO.
-
-SEPARATE_MEMBER_PAGES  = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
-# uses this value to replace tabs by spaces in code fragments.
-# Minimum value: 1, maximum value: 16, default value: 4.
-
-TAB_SIZE               = 4
-
-# This tag can be used to specify a number of aliases that act as commands in
-# the documentation. An alias has the form:
-# name=value
-# For example adding
-# "sideeffect=@par Side Effects:\n"
-# will allow you to put the command \sideeffect (or @sideeffect) in the
-# documentation, which will result in a user-defined paragraph with heading
-# "Side Effects:". You can put \n's in the value part of an alias to insert
-# newlines (in the resulting output). You can put ^^ in the value part of an
-# alias to insert a newline as if a physical newline was in the original file.
-# When you need a literal { or } or , in the value part of an alias you have to
-# escape them by means of a backslash (\). This can lead to conflicts with the
-# commands \{ and \}; for these it is advised to use the versions @{ and @} or a
-# double escape (\\{ and \\}).
-
-ALIASES                =
-
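No aliases are defined here, but if the sideeffect alias from the comment above
were added, it would be used in a doc comment like this (purely illustrative):

    /// Resets the reader back to the start of the stream.
    /// @sideeffect Any outstanding iterators become invalid.
    void Reset();
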
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
-# only. Doxygen will then generate output that is more tailored for C. For
-# instance, some of the names that are used will be different. The list of all
-# members will be omitted, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_FOR_C  = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
-# Python sources only. Doxygen will then generate output that is more tailored
-# for that language. For instance, namespaces will be presented as packages,
-# qualified scopes will look different, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_JAVA   = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources. Doxygen will then generate output that is tailored for Fortran.
-# The default value is: NO.
-
-OPTIMIZE_FOR_FORTRAN   = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for VHDL.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_VHDL   = NO
-
-# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
-# sources only. Doxygen will then generate output that is more tailored for that
-# language. For instance, namespaces will be presented as modules, types will be
-# separated into more groups, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_SLICE  = NO
-
-# Doxygen selects the parser to use depending on the extension of the files it
-# parses. With this tag you can assign which parser to use for a given
-# extension. Doxygen has a built-in mapping, but you can override or extend it
-# using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
-# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL,
-# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
-# FortranFree, unknown formatted Fortran: Fortran. In the latter case the parser
-# tries to guess whether the code is fixed or free formatted code, this is the
-# default for Fortran type files). For instance to make doxygen treat .inc files
-# as Fortran files (default is PHP), and .f files as C (default is Fortran),
-# use: inc=Fortran f=C.
-#
-# Note: For files without extension you can use no_extension as a placeholder.
-#
-# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
-
-EXTENSION_MAPPING      =
-
-# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
-# according to the Markdown format, which allows for more readable
-# documentation. See https://daringfireball.net/projects/markdown/ for details.
-# The output of markdown processing is further processed by doxygen, so you can
-# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
-# case of backward compatibility issues.
-# The default value is: YES.
-
-MARKDOWN_SUPPORT       = YES
-
-# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
-# to that level are automatically included in the table of contents, even if
-# they do not have an id attribute.
-# Note: This feature currently applies only to Markdown headings.
-# Minimum value: 0, maximum value: 99, default value: 5.
-# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
-
-TOC_INCLUDE_HEADINGS   = 0
-
-# When enabled doxygen tries to link words that correspond to documented
-# classes, or namespaces to their corresponding documentation. Such a link can
-# be prevented in individual cases by putting a % sign in front of the word or
-# globally by setting AUTOLINK_SUPPORT to NO.
-# The default value is: YES.
-
-AUTOLINK_SUPPORT       = YES
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should set this
-# tag to YES in order to let doxygen match functions declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string);
-# versus func(std::string) {}). This also makes the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT    = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT        = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT            = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen replace the get and set methods with a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT   = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC   = NO
-
-# If one adds a struct or class to a group and this option is enabled, then also
-# any nested class or struct is added to the same group. By default this option
-# is disabled and one has to add nested compounds explicitly via \ingroup.
-# The default value is: NO.
-
-GROUP_NESTED_COMPOUNDS = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING            = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS  = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT   = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE      = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL            = YES
-
-# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE        = NO
-
-# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual
-# methods of a class will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIV_VIRTUAL   = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE        = NO
-
-# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC         = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO,
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES  = YES
-
-# This flag is only useful for Objective-C code. If set to YES, local methods,
-# which are defined in the implementation section but not in the interface are
-# included in the documentation. If set to NO, only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS  = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default anonymous namespaces
-# are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES   = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS     = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO, these classes will be included in the various overviews. This option
-# has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES     = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# declarations. If set to NO, these declarations will be included in the
-# documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS  = YES
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO, these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS      = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS          = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES, upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# (including Cygwin) and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES       = NO
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES, the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES       = NO
-
-# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
-# append additional text to a page's title, such as Class Reference. If set to
-# YES the compound reference will be hidden.
-# The default value is: NO.
-
-HIDE_COMPOUND_REFERENCE= NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES     = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC  = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with sharp brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES   = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO            = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS       = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS        = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES       = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME     = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING  = NO
-
-# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
-# list. This list is created by putting \todo commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST      = YES
-
-# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
-# list. This list is created by putting \test commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST      = YES
-
-# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST       = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if <section_label> ... \endif and \cond <section_label>
-# ... \endcond blocks.
-
-ENABLED_SECTIONS       =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES  = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES, the
-# list will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES        = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES             = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES        = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command <command> <input-file>, where <command> is the value of
-# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
-# provided by doxygen. Whatever the program writes to standard output is used as
-# the file version. For an example see the documentation.
-
-FILE_VERSION_FILTER    =
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE            =
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. See also \cite for info how to create references.
-
-CITE_BIB_FILES         =
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET                  = YES
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS               = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED   = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR      = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO, doxygen will only warn about wrong or incomplete
-# parameter documentation, but not about the absence of documentation. If
-# EXTRACT_ALL is set to YES then this flag will automatically be disabled.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC       = NO
-
-# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
-# a warning is encountered.
-# The default value is: NO.
-
-WARN_AS_ERROR          = YES
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER)
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT            = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
-
-WARN_LOGFILE           =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
-# Note: If this tag is empty the current directory is searched.
-
-INPUT                  = ../src \
-                         .
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING         = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# read by doxygen.
-#
-# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
-# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
-# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
-# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment),
-# *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen
-# C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd,
-# *.vhdl, *.ucf, *.qsf and *.ice.
-
-FILE_PATTERNS          = *.h \
-                         *.hh \
-                         *.hxx \
-                         *.hpp \
-                         *.inc \
-                         *.m \
-                         *.markdown \
-                         *.md \
-                         *.mm \
-                         *.dox \
-                         *.py
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE              = YES
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE                = ../src/arrow/vendored \
-                         ../src/generated
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS       = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories for example use the pattern */test/*
-
-EXCLUDE_PATTERNS       = *-test.cc \
-                         *test* \
-                         *_generated.h \
-                         *-benchmark.cc \
-                         *internal*
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS        = detail \
-                         internal \
-                         _* \
-                         BitUtil \
-                         SSEUtil
-
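To make the effect of this list concrete: any symbol whose scope matches an entry,
such as a nested internal namespace, is dropped from the generated documentation.
A hypothetical sketch (the function names below are made up):

    namespace arrow {
    namespace internal {
    // Matches the "internal" entry in EXCLUDE_SYMBOLS above, so this
    // declaration is omitted from the generated documentation.
    void ValidateAlignment();
    }  // namespace internal

    // Lives in a non-excluded scope, so it is documented as usual.
    void PrettyPrint();
    }  // namespace arrow
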
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH           =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS       = *
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE      = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH             =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-# <filter> <input-file>
-#
-# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# properly processed by doxygen.
-
-INPUT_FILTER           =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# properly processed by doxygen.
-
-FILTER_PATTERNS        =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES    = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on, for instance, GitHub
-# and want to reuse the introduction page also for the doxygen output.
-
-USE_MDFILE_AS_MAINPAGE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER         = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES         = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS    = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# entity all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION    = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS        = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen's built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see https://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS              = NO
-
-# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS       = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX     = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX    = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-IGNORE_PREFIX          =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML          = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT            = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION    = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML, the header file must include any scripts and style sheets
-# that doxygen needs, which depend on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER            =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER            = footer.html
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET        =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# cascading style sheets that are included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefore more robust against future updates.
-# Doxygen will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list). For an example see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_STYLESHEET  =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES       =
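-
-# A hypothetical sketch: an extra logo image could be copied to the HTML
-# output directory and referenced from a custom header via the $relpath^
-# marker described above:
-#
-# HTML_EXTRA_FILES       = images/logo.png
-# (in HTML_HEADER:  <img src="$relpath^logo.png" alt="logo"/>)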
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the style sheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# https://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE    = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT    = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA  = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to YES can help to show when doxygen was last run and thus if the
-# documentation is up to date.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP         = NO
-
-# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
-# documentation will contain a main index with vertical navigation menus that
-# are dynamically created via JavaScript. If disabled, the navigation index will
-# consist of multiple levels of tabs that are statically embedded in every HTML
-# page. Disable this option to support browsers that do not have JavaScript,
-# like the Qt help browser.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_MENUS     = YES
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS  = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries to 1 will produce a fully collapsed tree by default. 0 is a special
-# value representing an infinite number of entries and will result in a fully
-# expanded tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: https://developer.apple.com/xcode/), introduced with OSX
-# 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
-# genXcode/_index.html for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET        = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME        = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID       = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME  = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index and a table of contents, and allow you to
-# search for words in the documentation. The HTML Help Workshop also contains
-# a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP      = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE               =
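-
-# A hypothetical sketch (path invented for illustration): prefixing a path
-# writes the compiled file outside the HTML output directory:
-#
-# CHM_FILE               = ..\help\project.chm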
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler (hhc.exe). If non-empty,
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-HHC_LOCATION           =
-
-# The GENERATE_CHI flag controls if a separate .chi index file is generated
-# (YES) or that it should be included in the master .chm file (NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI           = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING     =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated
-# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
-# enables the Previous and Next buttons.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC             = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND             = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP           = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE               =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE          = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER     = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME   =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS  =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS  =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION           =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated that, together with the HTML files, form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files needs
-# to be copied into the plugins directory of eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP   = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID         = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at
-# the top of each HTML page. A value of NO enables the index and the value YES
-# disables it. Since the tabs in the index contain the same information as the
-# navigation tree, you can set this option to YES if you also set
-# GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX          = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW      = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE   = 4
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH         = 250
-
-# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW    = NO
-
-# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg
-# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
-# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
-# the HTML output. These images will generally look nicer at scaled resolutions.
-# Possible values are: png (the default) and svg (looks nicer but requires the
-# pdf2svg tool).
-# The default value is: png.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FORMULA_FORMAT    = png
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE       = 10
-
-# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT    = YES
-
-# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands
-# to create new LaTeX commands to be used in formulas as building blocks. See
-# the section "Including formulas" for details.
-
-FORMULA_MACROFILE      =
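-
-# A hypothetical sketch: a file passed here (name invented) could define
-# shorthand LaTeX commands for use in formulas, e.g. a macros.inc containing:
-#
-# \newcommand{\R}{\mathbb{R}}
-# \renewcommand{\vec}[1]{\boldsymbol{#1}}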
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# https://www.mathjax.org) which uses client side JavaScript for the rendering
-# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want the formulas to look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX            = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT         = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from https://www.mathjax.org before deployment.
-# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
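-
-# A hypothetical sketch of the local-copy setup described above, with the
-# mathjax directory placed next to the HTML output directory:
-#
-# MATHJAX_RELPATH        = ../mathjax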
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS     =
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE       =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the javascript-based search engine can be slow; in that
-# case, enabling SERVER_BASED_SEARCH may provide a better solution. It is
-# possible to
-# search using the keyboard; to jump to the search box use <access key> + S
-# (what the <access key> is depends on the OS and browser, but it is typically
-# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
-# key> to jump into the search results window, the results can be navigated
-# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
-# the search. The filter options can be selected when the cursor is inside the
-# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
-# to select a filter and <Enter> or <escape> to activate or cancel the filter
-# option.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-SEARCHENGINE           = YES
-
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using JavaScript. There
-# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
-# setting. When disabled, doxygen will generate a PHP script for searching and
-# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
-# and searching needs to be provided by external tools. See the section
-# "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SERVER_BASED_SEARCH    = NO
-
-# When the EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
-# script for searching. Instead the search results are written to an XML file
-# which needs to be processed by an external indexer. Doxygen will invoke an
-# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
-# search results.
-#
-# Doxygen ships with an example indexer (doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: https://xapian.org/).
-#
-# See the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH        = NO
-
-# The SEARCHENGINE_URL should point to a search engine hosted by a web server
-# which will return the search results when EXTERNAL_SEARCH is enabled.
-#
-# Doxygen ships with an example indexer (doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: https://xapian.org/). See the section "External Indexing and
-# Searching" for details.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHENGINE_URL       =
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
-# search data is written to a file for indexing by an external tool. With the
-# SEARCHDATA_FILE tag the name of this file can be specified.
-# The default file is: searchdata.xml.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHDATA_FILE        = searchdata.xml
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
-# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
-# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
-# projects and redirect the results back to the right project.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH_ID     =
-
-# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
-# projects other than the one defined by this configuration file, but that are
-# all added to the same external search index. Each project needs to have a
-# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of
-# a project to a relative location where the documentation can be found. The
-# format is:
-# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTRA_SEARCH_MAPPINGS  =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
-# The default value is: YES.
-
-GENERATE_LATEX         = NO
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_OUTPUT           = latex
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked.
-#
-# Note that when not enabling USE_PDFLATEX the default is latex; when enabling
-# USE_PDFLATEX the default is pdflatex, and if in the latter case latex is
-# chosen, it is overwritten by pdflatex. For specific output languages the
-# default may have been set differently; this depends on the implementation of
-# the output language.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_CMD_NAME         = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
-# index for LaTeX.
-# Note: This tag is used in the Makefile / make.bat.
-# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file
-# (.tex).
-# The default file is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-MAKEINDEX_CMD_NAME     = makeindex
-
-# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to
-# generate index for LaTeX. In case there is no backslash (\) as first character
-# it will be automatically added in the LaTeX code.
-# Note: This tag is used in the generated output file (.tex).
-# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat.
-# The default value is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_MAKEINDEX_CMD    = makeindex
-
-# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-COMPACT_LATEX          = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used by the
-# printer.
-# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
-# 14 inches) and executive (7.25 x 10.5 inches).
-# The default value is: a4.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PAPER_TYPE             = a4
-
-# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
-# that should be included in the LaTeX output. The package can be specified just
-# by its name or with the correct syntax as to be used with the LaTeX
-# \usepackage command. To get the times font for instance you can specify :
-# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
-# To use the option intlimits with the amsmath package you can specify:
-# EXTRA_PACKAGES=[intlimits]{amsmath}
-# If left blank no extra packages will be included.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-EXTRA_PACKAGES         =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
-# generated LaTeX document. The header should contain everything until the first
-# chapter. If it is left blank doxygen will generate a standard header. See
-# section "Doxygen usage" for information on how to let doxygen write the
-# default header to a separate file.
-#
-# Note: Only use a user-defined header if you know what you are doing! The
-# following commands have a special meaning inside the header: $title,
-# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
-# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
-# string; for the replacement values of the other commands the user is referred
-# to HTML_HEADER.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HEADER           =
-
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
-# generated LaTeX document. The footer should contain everything after the last
-# chapter. If it is left blank doxygen will generate a standard footer. See
-# LATEX_HEADER for more information on how to generate a default footer and what
-# special commands can be used inside the footer.
-#
-# Note: Only use a user-defined footer if you know what you are doing!
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_FOOTER           =
-
-# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# LaTeX style sheets that are included after the standard style sheets created
-# by doxygen. Using this option one can overrule certain style aspects. Doxygen
-# will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list).
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_STYLESHEET =
-
-# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the LATEX_OUTPUT output
-# directory. Note that the files will be copied as-is; there are no commands or
-# markers available.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_FILES      =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
-# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
-# contain links (just like the HTML output) instead of page references. This
-# makes the output suitable for online browsing using a PDF viewer.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PDF_HYPERLINKS         = YES
-
-# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES to get
-# higher-quality PDF documentation.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-USE_PDFLATEX           = YES
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep running
-# if errors occur, instead of asking the user for help. This option is also used
-# when generating formulas in HTML.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BATCHMODE        = NO
-
-# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
-# index chapters (such as File Index, Compound Index, etc.) in the output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HIDE_INDICES     = NO
-
-# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
-# code with syntax highlighting in the LaTeX output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_SOURCE_CODE      = NO
-
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
-# bibliography, e.g. plainnat, or ieeetr. See
-# https://en.wikipedia.org/wiki/BibTeX and \cite for more info.
-# The default value is: plain.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BIB_STYLE        = plain
-
-# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_TIMESTAMP        = NO
-
-# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute)
-# path from which the emoji images will be read. If a relative path is entered,
-# it will be relative to the LATEX_OUTPUT directory. If left blank the
-# LATEX_OUTPUT directory will be used.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EMOJI_DIRECTORY  =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
-# RTF output is optimized for Word 97 and may not look too pretty with other RTF
-# readers/editors.
-# The default value is: NO.
-
-GENERATE_RTF           = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: rtf.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_OUTPUT             = rtf
-
-# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-COMPACT_RTF            = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
-# contain hyperlink fields. The RTF file will contain links (just like the HTML
-# output) instead of page references. This makes the output suitable for online
-# browsing using Word or some other Word compatible readers that support those
-# fields.
-#
-# Note: WordPad (write) and others do not support links.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_HYPERLINKS         = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's
-# configuration file, i.e. a series of assignments. You only have to provide
-# replacements, missing definitions are set to their default value.
-#
-# See also section "Doxygen usage" for information on how to generate the
-# default style sheet that doxygen normally uses.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_STYLESHEET_FILE    =
-
-# Set optional variables used in the generation of an RTF document. Syntax is
-# similar to doxygen's configuration file. A template extensions file can be
-# generated using doxygen -e rtf extensionFile.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_EXTENSIONS_FILE    =
-
-# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
-# with syntax highlighting in the RTF output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_SOURCE_CODE        = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
-# classes and files.
-# The default value is: NO.
-
-GENERATE_MAN           = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it. A directory man3 will be created inside the directory specified by
-# MAN_OUTPUT.
-# The default directory is: man.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_OUTPUT             = man
-
-# The MAN_EXTENSION tag determines the extension that is added to the generated
-# man pages. In case the manual section does not start with a number, the number
-# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
-# optional.
-# The default value is: .3.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_EXTENSION          = .3
-
-# The MAN_SUBDIR tag determines the name of the directory created within
-# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
-# MAN_EXTENSION with the initial . removed.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_SUBDIR             =
-
-# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
-# will generate one additional man file for each entity documented in the real
-# man page(s). These additional files only source the real man page, but without
-# them the man command would be unable to find the correct page.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_LINKS              = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
-# captures the structure of the code including all documentation.
-# The default value is: NO.
-
-GENERATE_XML           = YES
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: xml.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_OUTPUT             = xml
-
-# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
-# listings (including syntax highlighting and cross-referencing information) to
-# the XML output. Note that enabling this will significantly increase the size
-# of the XML output.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_PROGRAMLISTING     = YES
-
-# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include
-# namespace members in file scope as well, matching the HTML output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_NS_MEMB_FILE_SCOPE = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
-# that can be used to generate PDF.
-# The default value is: NO.
-
-GENERATE_DOCBOOK       = NO
-
-# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
-# front of it.
-# The default directory is: docbook.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_OUTPUT         = docbook
-
-# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
-# program listings (including syntax highlighting and cross-referencing
-# information) to the DOCBOOK output. Note that enabling this will significantly
-# increase the size of the DOCBOOK output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_PROGRAMLISTING = NO
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
-# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures
-# the structure of the code including all documentation. Note that this feature
-# is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_AUTOGEN_DEF   = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
-# file that captures the structure of the code including all documentation.
-#
-# Note that this feature is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_PERLMOD       = NO
-
-# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
-# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
-# output from the Perl module output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_LATEX          = NO
-
-# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
-# formatted so it can be parsed by a human reader. This is useful if you want to
-# understand what is going on. On the other hand, if this tag is set to NO, the
-# size of the Perl module output will be much smaller and Perl will parse it
-# just the same.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_PRETTY         = YES
-
-# The names of the make variables in the generated doxyrules.make file are
-# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
-# so different doxyrules.make files included by the same Makefile don't
-# overwrite each other's variables.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
-# C-preprocessor directives found in the sources and include files.
-# The default value is: YES.
-
-ENABLE_PREPROCESSING   = YES
-
-# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
-# in the source code. If set to NO, only conditional compilation will be
-# performed. Macro expansion can be done in a controlled way by setting
-# EXPAND_ONLY_PREDEF to YES.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-MACRO_EXPANSION        = YES
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
-# the macro expansion is limited to the macros specified with the PREDEFINED and
-# EXPAND_AS_DEFINED tags.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_ONLY_PREDEF     = YES
-
-# If the SEARCH_INCLUDES tag is set to YES, the include files in the
-# INCLUDE_PATH will be searched if a #include is found.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SEARCH_INCLUDES        = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by the
-# preprocessor.
-# This tag requires that the tag SEARCH_INCLUDES is set to YES.
-
-INCLUDE_PATH           =
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will be
-# used.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-INCLUDE_FILE_PATTERNS  =
-
-# The PREDEFINED tag can be used to specify one or more macro names that are
-# defined before the preprocessor is started (similar to the -D option of e.g.
-# gcc). The argument of the tag is a list of macros of the form: name or
-# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
-# is assumed. To prevent a macro definition from being undefined via #undef or
-# recursively expanded use the := operator instead of the = operator.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-PREDEFINED             = __attribute__(x)= \
-                         __declspec(x)= \
-                         PARQUET_EXPORT= \
-                         ARROW_EXPORT= \
-                         ARROW_DS_EXPORT= \
-                         ARROW_FLIGHT_EXPORT= \
-                         ARROW_EXTERN_TEMPLATE= \
-                         ARROW_DEPRECATED(x)=
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
-# tag can be used to specify a list of macro names that should be expanded. The
-# macro definition that is found in the sources will be used. Use the PREDEFINED
-# tag if you want to use a different macro definition that overrules the
-# definition found in the source code.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_AS_DEFINED      =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
-# remove all references to function-like macros that are alone on a line, have
-# an all uppercase name, and do not end with a semicolon. Such function macros
-# are typically used for boiler-plate code, and will confuse the parser if not
-# removed.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SKIP_FUNCTION_MACROS   = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES tag can be used to specify one or more tag files. For each tag
-# file the location of the external documentation should be added. The format of
-# a tag file without this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where loc1 and loc2 can be relative or absolute paths or URLs. See the
-# section "Linking to external documentation" for more information about the use
-# of tag files.
-# Note: Each tag file must have a unique name (where the name does NOT include
-# the path). If a tag file is not located in the directory in which doxygen is
-# run, you must also specify the path to the tagfile here.
-
-TAGFILES               =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
-# tag file that is based on the input files it reads. See section "Linking to
-# external documentation" for more information about the usage of tag files.
-
-GENERATE_TAGFILE       =
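-
-# A hypothetical sketch of cross-project linking: this file could write a tag
-# file with GENERATE_TAGFILE = arrow.tag, and a downstream Doxyfile could then
-# consume it via TAGFILES = arrow.tag=https://example.org/arrow/html
-# (names and URL invented for illustration).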
-
-# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
-# the class index. If set to NO, only the inherited external classes will be
-# listed.
-# The default value is: NO.
-
-ALLEXTERNALS           = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
-# in the modules index. If set to NO, only the current project's groups will be
-# listed.
-# The default value is: YES.
-
-EXTERNAL_GROUPS        = YES
-
-# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
-# the related pages index. If set to NO, only the current project's pages will
-# be listed.
-# The default value is: YES.
-
-EXTERNAL_PAGES         = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS         = YES
-
-# You can include diagrams made with dia in doxygen documentation. Doxygen will
-# then run dia to produce the diagram and insert it in the documentation. The
-# DIA_PATH tag allows you to specify the directory where the dia binary resides.
-# If left empty dia is assumed to be found in the default search path.
-
-DIA_PATH               =
-
-# If set to YES the inheritance and collaboration graphs will hide inheritance
-# and usage relations if the target is undocumented or is not a class.
-# The default value is: YES.
-
-HIDE_UNDOC_RELATIONS   = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
-# Bell Labs. The other options in this section have no effect if this option is
-# set to NO.
-# The default value is: NO.
-
-HAVE_DOT               = NO
-
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
-# to run in parallel. When set to 0 doxygen will base this on the number of
-# processors available in the system. You can set it explicitly to a value
-# larger than 0 to get control over the balance between CPU load and processing
-# speed.
-# Minimum value: 0, maximum value: 32, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_NUM_THREADS        = 0
-
-# When you want a differently looking font in the dot files that doxygen
-# generates you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME           = Helvetica
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE           = 10
-
-# By default doxygen will tell dot to use the default font as specified with
-# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
-# the path where dot can find it using this tag.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTPATH           =
-
-# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
-# each documented class showing the direct and indirect inheritance relations.
-# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CLASS_GRAPH            = YES
-
-# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
-# graph for each documented class showing the direct and indirect implementation
-# dependencies (inheritance, containment, and class references variables) of the
-# class with other documented classes.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-COLLABORATION_GRAPH    = YES
-
-# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
-# groups, showing the direct groups dependencies.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GROUP_GRAPHS           = YES
-
-# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LOOK               = NO
-
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
-# class node. If there are many fields or methods and many nodes the graph may
-# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
-# number of items for each type to make the size more manageable. Set this to 0
-# for no limit. Note that the threshold may be exceeded by 50% before the limit
-# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
-# but if the number exceeds 15, the total amount of fields shown is limited to
-# 10.
-# Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LIMIT_NUM_FIELDS   = 10
-
-# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
-# collaboration graphs will show the relations between templates and their
-# instances.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-TEMPLATE_RELATIONS     = NO
-
-# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
-# YES then doxygen will generate a graph for each documented file showing the
-# direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDE_GRAPH          = YES
-
-# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
-# set to YES then doxygen will generate a graph for each documented file showing
-# the direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDED_BY_GRAPH      = YES
-
-# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command. Disabling a call graph can be
-# accomplished by means of the command \hidecallgraph.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALL_GRAPH             = NO
-
-# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command. Disabling a caller graph can be
-# accomplished by means of the command \hidecallergraph.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALLER_GRAPH           = NO
-
-# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
-# graphical hierarchy of all classes instead of a textual one.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GRAPHICAL_HIERARCHY    = YES
-
-# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
-# dependencies a directory has on other directories in a graphical way. The
-# dependency relations are determined by the #include relations between the
-# files in the directories.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DIRECTORY_GRAPH        = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot. For an explanation of the image formats see the section
-# output formats in the documentation of the dot tool (Graphviz (see:
-# http://www.graphviz.org/)).
-# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
-# to make the SVG files visible in IE 9+ (other browsers do not have this
-# requirement).
-# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
-# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
-# png:gdiplus:gdiplus.
-# The default value is: png.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_IMAGE_FORMAT       = png
-
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
-# enable generation of interactive SVG images that allow zooming and panning.
-#
-# Note that this requires a modern browser other than Internet Explorer. Tested
-# and working are Firefox, Chrome, Safari, and Opera.
-# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
-# the SVG files visible. Older versions of IE do not have SVG support.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INTERACTIVE_SVG        = NO
-
-# The DOT_PATH tag can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_PATH               =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the \dotfile
-# command).
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOTFILE_DIRS           =
-
-# The MSCFILE_DIRS tag can be used to specify one or more directories that
-# contain msc files that are included in the documentation (see the \mscfile
-# command).
-
-MSCFILE_DIRS           =
-
-# The DIAFILE_DIRS tag can be used to specify one or more directories that
-# contain dia files that are included in the documentation (see the \diafile
-# command).
-
-DIAFILE_DIRS           =
-
-# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
-# path where java can find the plantuml.jar file. If left blank, it is assumed
-# PlantUML is not used or called during a preprocessing step. Doxygen will
-# generate a warning when it encounters a \startuml command in this case and
-# will not generate output for the diagram.
-
-PLANTUML_JAR_PATH      =
-
-# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
-# configuration file for plantuml.
-
-PLANTUML_CFG_FILE      =
-
-# When using plantuml, the specified paths are searched for files specified by
-# the !include statement in a plantuml block.
-
-PLANTUML_INCLUDE_PATH  =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
-# that will be shown in the graph. If the number of nodes in a graph becomes
-# larger than this value, doxygen will truncate the graph, which is visualized
-# by representing a node as a red box. Note that if the number of direct
-# children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES, doxygen will not show the graph at all. Also note that
-# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-# Minimum value: 0, maximum value: 10000, default value: 50.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_GRAPH_MAX_NODES    = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
-# generated by dot. A depth value of 3 means that only nodes reachable from the
-# root by following a path via at most 3 edges will be shown. Nodes that lie
-# further from the root node will be omitted. Note that setting this option to 1
-# or 2 may greatly reduce the computation time needed for large code bases. Also
-# note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-# Minimum value: 0, maximum value: 1000, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-MAX_DOT_GRAPH_DEPTH    = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, because dot on Windows does not seem
-# to support this out of the box.
-#
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_TRANSPARENT        = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10) support
-# it, this feature is disabled by default.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_MULTI_TARGETS      = NO
-
-# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
-# explaining the meaning of the various boxes and arrows in the dot generated
-# graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GENERATE_LEGEND        = YES
-
-# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
-# files that are used to generate the various graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_CLEANUP            = YES
diff --git a/cpp/apidoc/HDFS.md b/cpp/apidoc/HDFS.md
deleted file mode 100644
index d3671fb..0000000
--- a/cpp/apidoc/HDFS.md
+++ /dev/null
@@ -1,83 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-## Using Arrow's HDFS (Apache Hadoop Distributed File System) interface
-
-### Build requirements
-
-To build the integration, pass the following option to CMake
-
-```shell
--DARROW_HDFS=on
-```
-
-For convenience, we have bundled `hdfs.h` for libhdfs from Apache Hadoop in
-Arrow's thirdparty. If you wish to build against the `hdfs.h` in your installed
-Hadoop distribution, set the `$HADOOP_HOME` environment variable.
-
-### Runtime requirements
-
-By default, the HDFS client C++ class in `libarrow_io` uses the libhdfs JNI
-interface to the Java Hadoop client. This library is loaded **at runtime**
-(rather than at link / library load time, since the library may not be in your
-LD_LIBRARY_PATH), and relies on some environment variables.
-
-* `HADOOP_HOME`: the root of your installed Hadoop distribution. Often has
-`lib/native/libhdfs.so`.
-* `JAVA_HOME`: the location of your Java SDK installation.
-* `CLASSPATH`: must contain the Hadoop jars. You can set these using:
-
-```shell
-export CLASSPATH=`$HADOOP_HOME/bin/hadoop classpath --glob`
-```
-
-* `ARROW_LIBHDFS_DIR` (optional): explicit location of `libhdfs.so` if it is
-installed somewhere other than `$HADOOP_HOME/lib/native`.
-
-To accommodate distribution-specific nuances, the `JAVA_HOME` variable may be
-set to the root path for the Java SDK, the JRE path itself, or to the directory
-containing the `libjvm` library.
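-
-With these variables set, a C++ program connects through
-`arrow::io::HadoopFileSystem`. The following is a minimal sketch based on the
-Arrow C++ API of this era; the host, port, and user values are placeholders,
-and field or method names may differ slightly between Arrow versions:
-
-```cpp
-#include <memory>
-#include <arrow/io/hdfs.h>
-
-int main() {
-  arrow::io::HdfsConnectionConfig config;
-  config.host = "localhost";  // placeholder namenode host
-  config.port = 8020;         // placeholder namenode port
-  config.user = "hadoop";     // placeholder user name
-
-  // Connect loads libhdfs at runtime via the environment variables above.
-  std::shared_ptr<arrow::io::HadoopFileSystem> fs;
-  arrow::Status st = arrow::io::HadoopFileSystem::Connect(&config, &fs);
-  if (!st.ok()) {
-    return 1;  // connection failed; st.message() has details
-  }
-
-  // ... read and write HDFS files through fs ...
-
-  st = fs->Disconnect();
-  return st.ok() ? 0 : 1;
-}
-```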
-
-### Mac Specifics
-
-The installed location of Java on OS X can vary; however, the following snippet
-will set it automatically for you:
-
-```shell
-export JAVA_HOME=$(/usr/libexec/java_home)
-```
-
-Homebrew's Hadoop does not have native libs. Apache doesn't build these, so
-users must build Hadoop themselves to get the native libs. See this Stack
-Overflow answer for details:
-
-http://stackoverflow.com/a/40051353/478288
-
-Be sure to include the path to the native libs in `JAVA_LIBRARY_PATH`:
-
-```shell
-export JAVA_LIBRARY_PATH=$HADOOP_HOME/lib/native:$JAVA_LIBRARY_PATH
-```
-
-If you get an error about needing to install Java 6, then add *BundledApp* and
-*JNI* to the `JVMCapabilities` in `$JAVA_HOME/../Info.plist`. See
-
-https://oliverdowling.com.au/2015/10/09/oracles-jre-8-on-mac-os-x-el-capitan/
-
-https://derflounder.wordpress.com/2015/08/08/modifying-oracles-java-sdk-to-run-java-applications-on-os-x/
diff --git a/cpp/apidoc/footer.html b/cpp/apidoc/footer.html
deleted file mode 100644
index 01f4ad2..0000000
--- a/cpp/apidoc/footer.html
+++ /dev/null
@@ -1,31 +0,0 @@
-<!-- HTML footer for doxygen 1.8.14-->
-<!-- start footer part -->
-<!--BEGIN GENERATE_TREEVIEW-->
-<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
-  <ul>
-    $navpath
-    <li class="footer">$generatedby
-    <a href="http://www.doxygen.org/index.html">
-    <img class="footer" src="$relpath^doxygen.png" alt="doxygen"/></a> $doxygenversion </li>
-  </ul>
-</div>
-<!--END GENERATE_TREEVIEW-->
-<!--BEGIN !GENERATE_TREEVIEW-->
-<hr class="footer"/><address class="footer"><small>
-$generatedby &#160;<a href="http://www.doxygen.org/index.html">
-<img class="footer" src="$relpath^doxygen.png" alt="doxygen"/>
-</a> $doxygenversion
-</small></address>
-<!--END !GENERATE_TREEVIEW-->
-
-<script async src="https://www.googletagmanager.com/gtag/js?id=UA-107500873-1"></script>
-<script>
-  window.dataLayer = window.dataLayer || [];
-  function gtag(){dataLayer.push(arguments);}
-  gtag('js', new Date());
-
-  gtag('config', 'UA-107500873-1');
-</script>
-
-</body>
-</html>
diff --git a/cpp/apidoc/tutorials/plasma.md b/cpp/apidoc/tutorials/plasma.md
deleted file mode 100644
index fef4522..0000000
--- a/cpp/apidoc/tutorials/plasma.md
+++ /dev/null
@@ -1,450 +0,0 @@
-<!---
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License. See accompanying LICENSE file.
--->
-
-Using the Plasma In-Memory Object Store from C++
-================================================
-
-Apache Arrow offers the ability to share your data structures among multiple
-processes simultaneously through Plasma, an in-memory object store.
-
-Note that **the Plasma API is not stable**.
-
-Plasma clients are processes that run on the same machine as the object store.
-They communicate with the object store over Unix domain sockets, and they read
-and write data in the object store through shared memory.
-
-Plasma objects are immutable once they have been created.
-
-The following goes over the basics so you can begin using Plasma in your big
-data applications.
-
-Starting the Plasma store
--------------------------
-
-To start running the Plasma object store so that clients may
-connect and access the data, run the following command:
-
-```
-plasma_store_server -m 1000000000 -s /tmp/plasma
-```
-
-The `-m` flag specifies the size of the object store in bytes. The `-s` flag
-specifies the path of the Unix domain socket that the store will listen at.
-
-Therefore, the above command initializes a Plasma store with up to 1 GB of
-memory and sets the socket to `/tmp/plasma`.
-
-The Plasma store will remain available as long as the `plasma_store_server` process is
-running in a terminal window. Messages, such as alerts for disconnecting
-clients, may occasionally be output. To stop running the Plasma store, you
-can press `Ctrl-C` in the terminal window.
-
-Alternatively, you can run the Plasma store in the background and ignore all
-message output with the following terminal command:
-
-```
-plasma_store_server -m 1000000000 -s /tmp/plasma 1> /dev/null 2> /dev/null &
-```
-
-The Plasma store will instead run silently in the background. To stop running
-the Plasma store in this case, issue the command below:
-
-```
-killall plasma_store_server
-```
-
-Creating a Plasma client
-------------------------
-
-Now that the Plasma object store is up and running, it is time to make a client
-process connect to it. To use the Plasma object store as a client, your
-application should initialize a `plasma::PlasmaClient` object and tell it to
-connect to the socket specified when starting up the Plasma object store.
-
-```cpp
-#include <plasma/client.h>
-
-using namespace plasma;
-
-int main(int argc, char** argv) {
-  // Start up and connect a Plasma client.
-  PlasmaClient client;
-  ARROW_CHECK_OK(client.Connect("/tmp/plasma"));
-  // Disconnect the Plasma client.
-  ARROW_CHECK_OK(client.Disconnect());
-}
-```
-
-Save this program in a file `test.cc` and compile it with
-
-```
-g++ test.cc `pkg-config --cflags --libs plasma` --std=c++11
-```
-
-Note that multiple clients can be created within the same process.
-
-If the Plasma store is still running, you can now execute the `a.out` executable
-and the store will print something like
-
-```
-Disconnecting client on fd 5
-```
-
-which shows that the client was successfully disconnected.
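-
-As noted above, multiple clients can live in the same process; a minimal,
-purely illustrative sketch (using the same `Connect` call as before):
-
-```cpp
-// Two independent connections to the same store from a single process.
-PlasmaClient client1;
-PlasmaClient client2;
-ARROW_CHECK_OK(client1.Connect("/tmp/plasma"));
-ARROW_CHECK_OK(client2.Connect("/tmp/plasma"));
-
-ARROW_CHECK_OK(client1.Disconnect());
-ARROW_CHECK_OK(client2.Disconnect());
-```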
-
-Object IDs
-----------
-
-The Plasma object store uses twenty-byte identifiers for accessing objects
-stored in shared memory. Each object in the Plasma store should be associated
-with a unique ID. The Object ID is then a key that can be used by **any** client
-to fetch that object from the Plasma store.
-
-Random generation of Object IDs is often good enough to ensure unique IDs.
-For test purposes, you can use the function `random_object_id` from the header
-`plasma/test-util.h`, which uses a global random number generator to produce
-random Object IDs. In your own applications, we recommend generating a string
-of `ObjectID::size()` random bytes using your own random number generator and
-passing it to `ObjectID::from_binary` to construct the ObjectID (see the
-sketch after the next snippet).
-
-```cpp
-#include <plasma/test-util.h>
-
-// Randomly generate an Object ID.
-ObjectID object_id = random_object_id();
-```
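-
-A minimal sketch of the recommended own-RNG approach, assuming only the C++
-standard library and the `ObjectID` API shown in this document:
-
-```cpp
-#include <random>
-#include <string>
-
-// Fill ObjectID::size() bytes from a self-managed random number generator.
-std::string bytes(ObjectID::size(), '\0');
-std::mt19937 gen{std::random_device{}()};
-std::uniform_int_distribution<int> dist(0, 255);
-for (auto& b : bytes) {
-  b = static_cast<char>(dist(gen));
-}
-ObjectID object_id = ObjectID::from_binary(bytes);
-```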
-
-Now, any connected client that knows the object's Object ID can access the
-same object from the Plasma object store. For easy transportation of Object IDs,
-you can convert/serialize an Object ID into a binary string and back as
-follows:
-
-```cpp
-// From ObjectID to binary string
-std::string id_string = object_id.binary();
-
-// From binary string to ObjectID
-ObjectID id_object = ObjectID::from_binary(id_string);
-```
-
-You can also get a human readable representation of ObjectIDs in the same
-format that git uses for commit hashes by running `ObjectID::hex`.
-
-Here is a test program you can run:
-
-```cpp
-#include <iostream>
-#include <string>
-#include <plasma/client.h>
-#include <plasma/test-util.h>
-
-using namespace plasma;
-
-int main(int argc, char** argv) {
-  ObjectID object_id1 = random_object_id();
-  std::cout << "object_id1 is " << object_id1.hex() << std::endl;
-
-  std::string id_string = object_id1.binary();
-  ObjectID object_id2 = ObjectID::from_binary(id_string);
-  std::cout << "object_id2 is " << object_id2.hex() << std::endl;
-}
-```
-
-Creating an Object
-------------------
-
-Now that you have learned about Object IDs, which are used to refer to
-objects, let's look at how objects can be stored in Plasma.
-
-Storing objects is a two-stage process. First, a buffer is allocated with a
-call to `Create` and the object is constructed in place by the client. Then
-the object is made immutable and shared with other clients via a call to
-`Seal`.
-
-The `Create` call blocks while the Plasma store allocates a buffer of the
-appropriate size. The client will then map the buffer into its own address
-space. At this point the object can be constructed in place using a pointer that
-was written by the `Create` command.
-
-```cpp
-int64_t data_size = 100;
-// The buffer allocated by the Plasma store will be returned through this
-// shared pointer.
-std::shared_ptr<Buffer> data;
-// Create a Plasma object by specifying its ID and size.
-ARROW_CHECK_OK(client.Create(object_id, data_size, NULL, 0, &data));
-```
-
-You can also specify metadata for the object; the third argument is the
-metadata (as raw bytes) and the fourth argument is the size of the metadata.
-
-```cpp
-// Create a Plasma object with metadata.
-int64_t data_size = 100;
-std::string metadata = "{'author': 'john'}";
-std::shared_ptr<Buffer> data;
-client.Create(object_id, data_size, (uint8_t*) metadata.data(), metadata.size(), &data);
-```
-
-Now that we've obtained a buffer for our object's data, we can
-write our data to it:
-
-```cpp
-// Write some data for the Plasma object. Since data is a Buffer, write
-// through its mutable_data() pointer.
-uint8_t* d = data->mutable_data();
-for (int64_t i = 0; i < data_size; i++) {
-    d[i] = static_cast<uint8_t>(i % 4);
-}
-```
-
-When the client is done, the client **seals** the buffer, making the object
-immutable, and making it available to other Plasma clients:
-
-```cpp
-// Seal the object. This makes it available for all clients.
-client.Seal(object_id);
-```
-
-Here is an example that combines all these features:
-
-```cpp
-#include <plasma/client.h>
-
-using namespace plasma;
-
-int main(int argc, char** argv) {
-  // Start up and connect a Plasma client.
-  PlasmaClient client;
-  ARROW_CHECK_OK(client.Connect("/tmp/plasma"));
-  // Create an object with a fixed ObjectID.
-  ObjectID object_id = ObjectID::from_binary("00000000000000000000");
-  int64_t data_size = 1000;
-  std::shared_ptr<Buffer> data;
-  std::string metadata = "{'author': 'john'}";
-  ARROW_CHECK_OK(client.Create(object_id, data_size, (uint8_t*) metadata.data(), metadata.size(), &data));
-  // Write some data into the object.
-  auto d = data->mutable_data();
-  for (int64_t i = 0; i < data_size; i++) {
-    d[i] = static_cast<uint8_t>(i % 4);
-  }
-  // Seal the object.
-  ARROW_CHECK_OK(client.Seal(object_id));
-  // Disconnect the client.
-  ARROW_CHECK_OK(client.Disconnect());
-}
-```
-
-This example can be compiled with
-
-```
-g++ create.cc `pkg-config --cflags --libs plasma` --std=c++11 -o create
-```
-
-To verify that an object exists in the Plasma object store, you can
-call `PlasmaClient::Contains()` to check if an object has
-been created and sealed for a given Object ID. Note that this function
-will still return false if the object has been created, but not yet
-sealed:
-
-```cpp
-// Check if an object has been created and sealed.
-bool has_object;
-client.Contains(object_id, &has_object);
-if (has_object) {
-    // Object has been created and sealed, proceed
-}
-```
-
-Getting an Object
------------------
-
-After an object has been sealed, any client who knows the Object ID can get
-the object. To store the retrieved object contents, you should create an
-`ObjectBuffer`, then call `PlasmaClient::Get()` as follows:
-
-```cpp
-// Get from the Plasma store by Object ID.
-ObjectBuffer object_buffer;
-client.Get(&object_id, 1, -1, &object_buffer);
-```
-
-`PlasmaClient::Get()` isn't limited to fetching a single object
-from the Plasma store at a time. You can pass an array of Object IDs and
-`ObjectBuffers`, so long as you also specify the
-number of objects being fetched:
-
-```cpp
-// Get two objects at once from the Plasma store. This function
-// call will block until both objects have been fetched.
-ObjectBuffer multiple_buffers[2];
-ObjectID multiple_ids[2] = {object_id1, object_id2};
-client.Get(multiple_ids, 2, -1, multiple_buffers);
-```
-
-Since `PlasmaClient::Get()` is a blocking function call, it may be
-necessary to limit the amount of time the function is allowed to take
-when trying to fetch from the Plasma store. You can pass in a timeout
-in milliseconds when calling `PlasmaClient::Get()`. To use `PlasmaClient::Get()`
-without a timeout, just pass in -1 as in the previous example calls:
-
-```cpp
-// Make the function call give up fetching the object if it takes
-// more than 100 milliseconds.
-int64_t timeout = 100;
-client.Get(&object_id, 1, timeout, &object_buffer);
-```
-
-Finally, to access the object, you can use the `data` and
-`metadata` members of the `ObjectBuffer`. The underlying data can be indexed
-like any array:
-
-```cpp
-// Access object data. object_buffer.data is a Buffer.
-const uint8_t* data = object_buffer.data->data();
-int64_t data_size = object_buffer.data->size();
-
-// Access object metadata.
-const uint8_t* metadata = object_buffer.metadata->data();
-int64_t metadata_size = object_buffer.metadata->size();
-
-// Index into data array.
-uint8_t first_data_byte = data[0];
-```
-
-Here is a longer example that shows these capabilities:
-
-```cpp
-#include <plasma/client.h>
-
-using namespace plasma;
-
-int main(int argc, char** argv) {
-  // Start up and connect a Plasma client.
-  PlasmaClient client;
-  ARROW_CHECK_OK(client.Connect("/tmp/plasma"));
-  ObjectID object_id = ObjectID::from_binary("00000000000000000000");
-  ObjectBuffer object_buffer;
-  ARROW_CHECK_OK(client.Get(&object_id, 1, -1, &object_buffer));
-
-  // Retrieve object data.
-  auto buffer = object_buffer.data;
-  const uint8_t* data = buffer->data();
-  int64_t data_size = buffer->size();
-
-  // Check that the data agrees with what was written in the other process.
-  for (int64_t i = 0; i < data_size; i++) {
-    ARROW_CHECK(data[i] == static_cast<uint8_t>(i % 4));
-  }
-
-  // Disconnect the client.
-  ARROW_CHECK_OK(client.Disconnect());
-}
-```
-
-If you compile it with
-
-```
-g++ get.cc `pkg-config --cflags --libs plasma` --std=c++11 -o get
-```
-
-and run it with `./get`, all the assertions will pass if you run the `create`
-example from above on the same Plasma store.
-
-
-Object Lifetime Management
---------------------------
-
-The Plasma store internally does reference counting to make sure objects that
-are mapped into a client's address space with `PlasmaClient::Get` remain
-accessible. To unmap objects from a client, call `PlasmaClient::Release`.
-All objects that are mapped into a client's address space will automatically
-be released when the client disconnects from the store (this happens even
-if the client process crashes or otherwise fails to call `Disconnect`).
-
-If a new object is created and there is not enough space in the Plasma store,
-the store will evict the least recently used objects that are not in use (an
-object is in use if at least one client has gotten it but not released it).
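-
-A minimal sketch of the get/release pattern, reusing the `client` and
-`object_id` names from the examples above:
-
-```cpp
-// Map the object into this client's address space.
-ObjectBuffer object_buffer;
-ARROW_CHECK_OK(client.Get(&object_id, 1, -1, &object_buffer));
-
-// ... read from object_buffer.data ...
-
-// Unmap the object so the store may evict it once memory runs low.
-ARROW_CHECK_OK(client.Release(object_id));
-```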
-
-Object notifications
---------------------
-
-Additionally, you can arrange to have Plasma notify you when objects are
-sealed in the object store. This can be especially handy when your
-program collaborates with other Plasma clients and needs to know
-when they make objects available.
-
-First, you can subscribe your current Plasma client to such notifications
-by getting a file descriptor:
-
-```cpp
-// Start receiving notifications into file_descriptor.
-int fd;
-ARROW_CHECK_OK(client.Subscribe(&fd));
-```
-
-Once you have the file descriptor, you can have your current Plasma client
-wait to receive the next object notification. Object notifications
-include information such as Object ID, data size, and metadata size of
-the next newly available object:
-
-```cpp
-// Receive notification of the next newly available object.
-// Notification information is stored in object_id, data_size, and metadata_size
-ObjectID object_id;
-int64_t data_size;
-int64_t metadata_size;
-ARROW_CHECK_OK(client.GetNotification(fd, &object_id, &data_size, &metadata_size));
-
-// Get the newly available object.
-ObjectBuffer object_buffer;
-ARROW_CHECK_OK(client.Get(&object_id, 1, -1, &object_buffer));
-```
-
-Here is a full program that shows this capability:
-
-```cpp
-#include <plasma/client.h>
-
-using namespace plasma;
-
-int main(int argc, char** argv) {
-  // Start up and connect a Plasma client.
-  PlasmaClient client;
-  ARROW_CHECK_OK(client.Connect("/tmp/plasma"));
-
-  int fd;
-  ARROW_CHECK_OK(client.Subscribe(&fd));
-
-  ObjectID object_id;
-  int64_t data_size;
-  int64_t metadata_size;
-  while (true) {
-    ARROW_CHECK_OK(client.GetNotification(fd, &object_id, &data_size, &metadata_size));
-
-    std::cout << "Received object notification for object_id = "
-              << object_id.hex() << ", with data_size = " << data_size
-              << ", and metadata_size = " << metadata_size << std::endl;
-  }
-
-  // Disconnect the client.
-  ARROW_CHECK_OK(client.Disconnect());
-}
-```
-
-If you compile it with
-
-```
-g++ subscribe.cc `pkg-config --cflags --libs plasma` --std=c++11 -o subscribe
-```
-
-and invoke `./create` and `./subscribe` while the Plasma store is running,
-you can observe the new object arriving.
diff --git a/cpp/apidoc/tutorials/tensor_to_py.md b/cpp/apidoc/tutorials/tensor_to_py.md
deleted file mode 100644
index cd191fe..0000000
--- a/cpp/apidoc/tutorials/tensor_to_py.md
+++ /dev/null
@@ -1,127 +0,0 @@
-<!---
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License. See accompanying LICENSE file.
--->
-
-Use Plasma to Access Tensors from C++ in Python
-==============================================
-
-This short tutorial shows how to use Arrow and the Plasma Store to send data
-from C++ to Python.
-
-In detail, we will show how to:
-1. Serialize a floating-point array in C++ into an Arrow tensor
-2. Save the Arrow tensor to Plasma
-3. Access the Tensor in a Python process
-
-This approach has the advantage that multiple Python processes can all read
-the tensor with zero copies. Therefore, only one copy is necessary when we send
-a tensor from one C++ process to many Python processes.
-
-
-Step 0: Set up
-------
-We will include the following header files and construct a Plasma client.
-
-```cpp
-#include <plasma/client.h>
-#include <arrow/tensor.h>
-#include <arrow/array.h>
-#include <arrow/buffer.h>
-#include <arrow/io/memory.h>
-#include <arrow/ipc/writer.h>
-
-PlasmaClient client_;
-ARROW_CHECK_OK(client_.Connect("/tmp/plasma", "", 0));
-```
-
-
-Step 1: Serialize a floating point array in C++ into an Arrow Tensor
---------------------------------------------------------------------
-In this step, we will construct a floating-point array in C++.
-
-```cpp
-// Generate an Object ID for Plasma
-ObjectID object_id = ObjectID::from_binary("11111111111111111111");
-
-// Generate Float Array
-int64_t input_length = 1000;
-std::vector<float> input(input_length);
-for (int64_t i = 0; i < input_length; ++i) {
-  input[i] = 2.0;
-}
-
-// Create Arrow Tensor Object, no copy made!
-// {input_length} is the shape of the tensor
-auto value_buffer = Buffer::Wrap<float>(input);
-Tensor t(float32(), value_buffer, {input_length});
-```
-
-Step 2: Save the Arrow Tensor to Plasma In-Memory Object Store
---------------------------------------------------------------
-Continuing from Step 1, this step will save the tensor to the Plasma store. We
-use `arrow::ipc::WriteTensor` to write the data.
-
-The variable `meta_len` will contain the length of the tensor metadata
-after the call to `arrow::ipc::WriteTensor`.
-
-```cpp
-// Get the size of the tensor to be stored in Plasma
-int64_t datasize;
-ARROW_CHECK_OK(ipc::GetTensorSize(t, &datasize));
-int32_t meta_len = 0;
-
-// Create the Plasma Object
-// Plasma is responsible for initializing and resizing the buffer
-// This buffer will contain the _serialized_ tensor
-std::shared_ptr<Buffer> buffer;
-ARROW_CHECK_OK(
-    client_.Create(object_id, datasize, NULL, 0, &buffer));
-
-// Writing Process, this will copy the tensor into Plasma
-io::FixedSizeBufferWriter stream(buffer);
-ARROW_CHECK_OK(arrow::ipc::WriteTensor(t, &stream, &meta_len, &datasize));
-
-// Seal Plasma Object
-// This computes a hash of the object data by default
-ARROW_CHECK_OK(client_.Seal(object_id));
-```
-
-Step 3: Access the Tensor in a Python Process
----------------------------------------------
-In Python, we will construct a Plasma client and point it to the store's socket.
-The `inputs` variable will be a list of Object IDs in their raw byte string form.
-
-```python
-import numpy as np
-import pyarrow as pa
-import pyarrow.plasma as plasma
-
-plasma_client = plasma.connect('/tmp/plasma')
-
-# inputs: a list of object ids
-inputs = [20 * b'1']
-
-# Construct Object ID and perform a batch get
-object_ids = [plasma.ObjectID(inp) for inp in inputs]
-buffers = plasma_client.get_buffers(object_ids)
-
-# Read the tensor and convert to numpy array for each object
-arrs = []
-for buffer in buffers:
-    reader = pa.BufferReader(buffer)
-    t = pa.read_tensor(reader)
-    arr = t.to_numpy()
-    arrs.append(arr)
-
-# arrs is now a list of numpy arrays
-assert np.all(arrs[0] == 2.0 * np.ones(1000, dtype="float32"))
-```
diff --git a/cpp/build-support/asan_symbolize.py b/cpp/build-support/asan_symbolize.py
deleted file mode 100755
index bffb75a..0000000
--- a/cpp/build-support/asan_symbolize.py
+++ /dev/null
@@ -1,368 +0,0 @@
-#!/usr/bin/env python
-#===- lib/asan/scripts/asan_symbolize.py -----------------------------------===#
-#
-#                     The LLVM Compiler Infrastructure
-#
-# This file is distributed under the University of Illinois Open Source
-# License. See LICENSE.TXT for details.
-#
-#===------------------------------------------------------------------------===#
-import bisect
-import os
-import re
-import subprocess
-import sys
-
-llvm_symbolizer = None
-symbolizers = {}
-filetypes = {}
-vmaddrs = {}
-DEBUG = False
-
-
-# FIXME: merge the code that calls fix_filename().
-def fix_filename(file_name):
-  for path_to_cut in sys.argv[1:]:
-    file_name = re.sub('.*' + path_to_cut, '', file_name)
-  file_name = re.sub('.*asan_[a-z_]*.cc:[0-9]*', '_asan_rtl_', file_name)
-  file_name = re.sub('.*crtstuff.c:0', '???:0', file_name)
-  return file_name
-
-
-class Symbolizer(object):
-  def __init__(self):
-    pass
-
-  def symbolize(self, addr, binary, offset):
-    """Symbolize the given address (pair of binary and offset).
-
-    Overridden in subclasses.
-    Args:
-        addr: virtual address of an instruction.
-        binary: path to executable/shared object containing this instruction.
-        offset: instruction offset in the @binary.
-    Returns:
-        list of strings (one string for each inlined frame) describing
-        the code locations for this instruction (that is, function name, file
-        name, line and column numbers).
-    """
-    return None
-
-
-class LLVMSymbolizer(Symbolizer):
-  def __init__(self, symbolizer_path):
-    super(LLVMSymbolizer, self).__init__()
-    self.symbolizer_path = symbolizer_path
-    self.pipe = self.open_llvm_symbolizer()
-
-  def open_llvm_symbolizer(self):
-    if not os.path.exists(self.symbolizer_path):
-      return None
-    cmd = [self.symbolizer_path,
-           '--use-symbol-table=true',
-           '--demangle=false',
-           '--functions=true',
-           '--inlining=true']
-    if DEBUG:
-      print(' '.join(cmd))
-    return subprocess.Popen(cmd, stdin=subprocess.PIPE,
-                            stdout=subprocess.PIPE)
-
-  def symbolize(self, addr, binary, offset):
-    """Overrides Symbolizer.symbolize."""
-    if not self.pipe:
-      return None
-    result = []
-    try:
-      symbolizer_input = '%s %s' % (binary, offset)
-      if DEBUG:
-        print(symbolizer_input)
-      self.pipe.stdin.write(symbolizer_input)
-      self.pipe.stdin.write('\n')
-      while True:
-        function_name = self.pipe.stdout.readline().rstrip()
-        if not function_name:
-          break
-        file_name = self.pipe.stdout.readline().rstrip()
-        file_name = fix_filename(file_name)
-        if (not function_name.startswith('??') and
-            not file_name.startswith('??')):
-          # Append only valid frames.
-          result.append('%s in %s %s' % (addr, function_name,
-                                         file_name))
-    except Exception:
-      result = []
-    if not result:
-      result = None
-    return result
-
-
-def LLVMSymbolizerFactory(system):
-  symbolizer_path = os.getenv('LLVM_SYMBOLIZER_PATH')
-  if not symbolizer_path:
-    # Assume llvm-symbolizer is in PATH.
-    symbolizer_path = 'llvm-symbolizer'
-  return LLVMSymbolizer(symbolizer_path)
-
-
-class Addr2LineSymbolizer(Symbolizer):
-  def __init__(self, binary):
-    super(Addr2LineSymbolizer, self).__init__()
-    self.binary = binary
-    self.pipe = self.open_addr2line()
-
-  def open_addr2line(self):
-    cmd = ['addr2line', '-f', '-e', self.binary]
-    if DEBUG:
-      print(' '.join(cmd))
-    return subprocess.Popen(cmd,
-                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-
-  def symbolize(self, addr, binary, offset):
-    """Overrides Symbolizer.symbolize."""
-    if self.binary != binary:
-      return None
-    try:
-      self.pipe.stdin.write(offset)
-      self.pipe.stdin.write('\n')
-      function_name = self.pipe.stdout.readline().rstrip()
-      file_name = self.pipe.stdout.readline().rstrip()
-    except Exception:
-      function_name = ''
-      file_name = ''
-    file_name = fix_filename(file_name)
-    return ['%s in %s %s' % (addr, function_name, file_name)]
-
-
-class DarwinSymbolizer(Symbolizer):
-  def __init__(self, addr, binary):
-    super(DarwinSymbolizer, self).__init__()
-    self.binary = binary
-    # Guess which arch we're running. 10 = len('0x') + 8 hex digits.
-    if len(addr) > 10:
-      self.arch = 'x86_64'
-    else:
-      self.arch = 'i386'
-    self.vmaddr = None
-    self.pipe = None
-
-  def write_addr_to_pipe(self, offset):
-    self.pipe.stdin.write('0x%x' % int(offset, 16))
-    self.pipe.stdin.write('\n')
-
-  def open_atos(self):
-    if DEBUG:
-      print('atos -o %s -arch %s' % (self.binary, self.arch))
-    cmdline = ['atos', '-o', self.binary, '-arch', self.arch]
-    self.pipe = subprocess.Popen(cmdline,
-                                 stdin=subprocess.PIPE,
-                                 stdout=subprocess.PIPE,
-                                 stderr=subprocess.PIPE)
-
-  def symbolize(self, addr, binary, offset):
-    """Overrides Symbolizer.symbolize."""
-    if self.binary != binary:
-      return None
-    self.open_atos()
-    self.write_addr_to_pipe(offset)
-    self.pipe.stdin.close()
-    atos_line = self.pipe.stdout.readline().rstrip()
-    # A well-formed atos response looks like this:
-    #   foo(type1, type2) (in object.name) (filename.cc:80)
-    match = re.match('^(.*) \(in (.*)\) \((.*:\d*)\)$', atos_line)
-    if DEBUG:
-      print('atos_line: {0}'.format(atos_line))
-    if match:
-      function_name = match.group(1)
-      function_name = re.sub('\(.*?\)', '', function_name)
-      file_name = fix_filename(match.group(3))
-      return ['%s in %s %s' % (addr, function_name, file_name)]
-    else:
-      return ['%s in %s' % (addr, atos_line)]
-
-
-# Chain several symbolizers so that if one symbolizer fails, we fall back
-# to the next symbolizer in chain.
-class ChainSymbolizer(Symbolizer):
-  def __init__(self, symbolizer_list):
-    super(ChainSymbolizer, self).__init__()
-    self.symbolizer_list = symbolizer_list
-
-  def symbolize(self, addr, binary, offset):
-    """Overrides Symbolizer.symbolize."""
-    for symbolizer in self.symbolizer_list:
-      if symbolizer:
-        result = symbolizer.symbolize(addr, binary, offset)
-        if result:
-          return result
-    return None
-
-  def append_symbolizer(self, symbolizer):
-    self.symbolizer_list.append(symbolizer)
-
-
-def BreakpadSymbolizerFactory(binary):
-  suffix = os.getenv('BREAKPAD_SUFFIX')
-  if suffix:
-    filename = binary + suffix
-    if os.access(filename, os.F_OK):
-      return BreakpadSymbolizer(filename)
-  return None
-
-
-def SystemSymbolizerFactory(system, addr, binary):
-  if system == 'Darwin':
-    return DarwinSymbolizer(addr, binary)
-  elif system == 'Linux':
-    return Addr2LineSymbolizer(binary)
-
-
-class BreakpadSymbolizer(Symbolizer):
-  def __init__(self, filename):
-    super(BreakpadSymbolizer, self).__init__()
-    self.filename = filename
-    lines = open(filename).readlines()  # use open(); file() is Python 2 only
-    self.files = []
-    self.symbols = {}
-    self.address_list = []
-    self.addresses = {}
-    # MODULE mac x86_64 A7001116478B33F18FF9BEDE9F615F190 t
-    fragments = lines[0].rstrip().split()
-    self.arch = fragments[2]
-    self.debug_id = fragments[3]
-    self.binary = ' '.join(fragments[4:])
-    self.parse_lines(lines[1:])
-
-  def parse_lines(self, lines):
-    cur_function_addr = ''
-    for line in lines:
-      fragments = line.split()
-      if fragments[0] == 'FILE':
-        assert int(fragments[1]) == len(self.files)
-        self.files.append(' '.join(fragments[2:]))
-      elif fragments[0] == 'PUBLIC':
-        self.symbols[int(fragments[1], 16)] = ' '.join(fragments[3:])
-      elif fragments[0] in ['CFI', 'STACK']:
-        pass
-      elif fragments[0] == 'FUNC':
-        cur_function_addr = int(fragments[1], 16)
-        if cur_function_addr not in self.symbols:
-          self.symbols[cur_function_addr] = ' '.join(fragments[4:])
-      else:
-        # Line starting with an address.
-        addr = int(fragments[0], 16)
-        self.address_list.append(addr)
-        # Tuple of symbol address, size, line, file number.
-        self.addresses[addr] = (cur_function_addr,
-                                int(fragments[1], 16),
-                                int(fragments[2]),
-                                int(fragments[3]))
-    self.address_list.sort()
-
-  def get_sym_file_line(self, addr):
-    key = None
-    if addr in self.addresses.keys():
-      key = addr
-    else:
-      index = bisect.bisect_left(self.address_list, addr)
-      if index == 0:
-        return None
-      else:
-        key = self.address_list[index - 1]
-    sym_id, size, line_no, file_no = self.addresses[key]
-    symbol = self.symbols[sym_id]
-    filename = self.files[file_no]
-    if addr < key + size:
-      return symbol, filename, line_no
-    else:
-      return None
-
-  def symbolize(self, addr, binary, offset):
-    if self.binary != binary:
-      return None
-    res = self.get_sym_file_line(int(offset, 16))
-    if res:
-      function_name, file_name, line_no = res
-      result = ['%s in %s %s:%d' % (
-          addr, function_name, file_name, line_no)]
-      print(result)
-      return result
-    else:
-      return None
-
-
-class SymbolizationLoop(object):
-  def __init__(self, binary_name_filter=None):
-    # Used by clients who may want to supply a different binary name.
-    # E.g. in Chrome several binaries may share a single .dSYM.
-    self.binary_name_filter = binary_name_filter
-    self.system = os.uname()[0]
-    if self.system in ['Linux', 'Darwin']:
-      self.llvm_symbolizer = LLVMSymbolizerFactory(self.system)
-    else:
-      raise Exception('Unknown system')
-
-  def symbolize_address(self, addr, binary, offset):
-    # Use the chain of symbolizers:
-    # Breakpad symbolizer -> LLVM symbolizer -> addr2line/atos
-    # (fall back to next symbolizer if the previous one fails).
-    if not binary in symbolizers:
-      symbolizers[binary] = ChainSymbolizer(
-          [BreakpadSymbolizerFactory(binary), self.llvm_symbolizer])
-    result = symbolizers[binary].symbolize(addr, binary, offset)
-    if result is None:
-      # Initialize system symbolizer only if other symbolizers failed.
-      symbolizers[binary].append_symbolizer(
-          SystemSymbolizerFactory(self.system, addr, binary))
-      result = symbolizers[binary].symbolize(addr, binary, offset)
-    # The system symbolizer must produce some result.
-    assert result
-    return result
-
-  def print_symbolized_lines(self, symbolized_lines):
-    if not symbolized_lines:
-      print(self.current_line)
-    else:
-      for symbolized_frame in symbolized_lines:
-        print('    #' + str(self.frame_no) + ' ' + symbolized_frame.rstrip())
-        self.frame_no += 1
-
-  def process_stdin(self):
-    self.frame_no = 0
-
-    if sys.version_info[0] == 2:
-      sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
-    else:
-      # Unbuffered output is not supported in Python 3
-      sys.stdout = os.fdopen(sys.stdout.fileno(), 'w')
-
-    while True:
-      line = sys.stdin.readline()
-      if not line: break
-      self.current_line = line.rstrip()
-      #0 0x7f6e35cf2e45  (/blah/foo.so+0x11fe45)
-      stack_trace_line_format = (
-          '^( *#([0-9]+) *)(0x[0-9a-f]+) *\((.*)\+(0x[0-9a-f]+)\)')
-      match = re.match(stack_trace_line_format, line)
-      if not match:
-        print(self.current_line)
-        continue
-      if DEBUG:
-        print(line)
-      _, frameno_str, addr, binary, offset = match.groups()
-      if frameno_str == '0':
-        # Assume that frame #0 is the first frame of new stack trace.
-        self.frame_no = 0
-      original_binary = binary
-      if self.binary_name_filter:
-        binary = self.binary_name_filter(binary)
-      symbolized_line = self.symbolize_address(addr, binary, offset)
-      if not symbolized_line:
-        if original_binary != binary:
-          symbolized_line = self.symbolize_address(addr, binary, offset)
-      self.print_symbolized_lines(symbolized_line)
-
-
-if __name__ == '__main__':
-  loop = SymbolizationLoop()
-  loop.process_stdin()
diff --git a/cpp/build-support/build-lz4-lib.sh b/cpp/build-support/build-lz4-lib.sh
deleted file mode 100755
index d350dbc..0000000
--- a/cpp/build-support/build-lz4-lib.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-export CFLAGS="${CFLAGS} -O3 -fPIC"
-if [ -z "$MAKELEVEL" ]; then
-  make -j4 CFLAGS="$CFLAGS" "$@"
-else
-  make CFLAGS="$CFLAGS" "$@"
-fi
diff --git a/cpp/build-support/build-zstd-lib.sh b/cpp/build-support/build-zstd-lib.sh
deleted file mode 100755
index d336866..0000000
--- a/cpp/build-support/build-zstd-lib.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-export CFLAGS="${CFLAGS} -O3 -fPIC"
-if [ -z "$MAKELEVEL" ]; then
-  make -j4
-else
-  make
-fi
diff --git a/cpp/build-support/cpplint.py b/cpp/build-support/cpplint.py
deleted file mode 100755
index 470623d..0000000
--- a/cpp/build-support/cpplint.py
+++ /dev/null
@@ -1,6477 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2009 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#    * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#    * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#    * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Does google-lint on c++ files.
-
-The goal of this script is to identify places in the code that *may*
-be in non-compliance with google style.  It does not attempt to fix
-up these problems -- the point is to educate.  It also does not
-attempt to find all problems, or to ensure that everything it does
-find is legitimately a problem.
-
-In particular, we can get very confused by /* and // inside strings!
-We do a small hack, which is to ignore //'s with "'s after them on the
-same line, but it is far from perfect (in either direction).
-"""
-
-import codecs
-import copy
-import getopt
-import glob
-import itertools
-import math  # for log
-import os
-import re
-import sre_compile
-import string
-import sys
-import unicodedata
-import xml.etree.ElementTree
-
-# if empty, use defaults
-_header_extensions = set([])
-
-# if empty, use defaults
-_valid_extensions = set([])
-
-
-# Files with any of these extensions are considered to be
-# header files (and will undergo different style checks).
-# This set can be extended by using the --headers
-# option (also supported in CPPLINT.cfg)
-def GetHeaderExtensions():
-  if not _header_extensions:
-    return set(['h', 'hpp', 'hxx', 'h++', 'cuh'])
-  return _header_extensions
-
-# The allowed extensions for file names
-# This is set by --extensions flag
-def GetAllExtensions():
-  if not _valid_extensions:
-    return GetHeaderExtensions().union(set(['c', 'cc', 'cpp', 'cxx', 'c++', 'cu']))
-  return _valid_extensions
-
-def GetNonHeaderExtensions():
-  return GetAllExtensions().difference(GetHeaderExtensions())
-
-
-_USAGE = """
-Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
-                   [--filter=-x,+y,...]
-                   [--counting=total|toplevel|detailed] [--repository=path]
-                   [--root=subdir] [--linelength=digits] [--recursive]
-                   [--exclude=path]
-                   [--headers=ext1,ext2]
-                   [--extensions=hpp,cpp,...]
-        <file> [file] ...
-
-  The style guidelines this tries to follow are those in
-    https://google.github.io/styleguide/cppguide.html
-
-  Every problem is given a confidence score from 1-5, with 5 meaning we are
-  certain of the problem, and 1 meaning it could be a legitimate construct.
-  This will miss some errors, and is not a substitute for a code review.
-
-  To suppress false-positive errors of a certain category, add a
-  'NOLINT(category)' comment to the line.  NOLINT or NOLINT(*)
-  suppresses errors of all categories on that line.
-
-  The files passed in will be linted; at least one file must be provided.
-  Default linted extensions are %s.
-  Other file types will be ignored.
-  Change the extensions with the --extensions flag.
-
-  Flags:
-
-    output=emacs|eclipse|vs7|junit
-      By default, the output is formatted to ease emacs parsing.  Output
-      compatible with eclipse (eclipse), Visual Studio (vs7), and JUnit
-      XML parsers such as those used in Jenkins and Bamboo may also be
-      used.  Other formats are unsupported.
-
-    verbose=#
-      Specify a number 0-5 to restrict errors to certain verbosity levels.
-      Errors with lower verbosity levels have lower confidence and are more
-      likely to be false positives.
-
-    quiet
-      Suppress output other than linting errors, such as information about
-      which files have been processed and excluded.
-
-    filter=-x,+y,...
-      Specify a comma-separated list of category-filters to apply: only
-      error messages whose category names pass the filters will be printed.
-      (Category names are printed with the message and look like
-      "[whitespace/indent]".)  Filters are evaluated left to right.
-      "-FOO" and "FOO" means "do not print categories that start with FOO".
-      "+FOO" means "do print categories that start with FOO".
-
-      Examples: --filter=-whitespace,+whitespace/braces
-                --filter=whitespace,runtime/printf,+runtime/printf_format
-                --filter=-,+build/include_what_you_use
-
-      To see a list of all the categories used in cpplint, pass no arg:
-         --filter=
-
-    counting=total|toplevel|detailed
-      The total number of errors found is always printed. If
-      'toplevel' is provided, then the count of errors in each of
-      the top-level categories like 'build' and 'whitespace' will
-      also be printed. If 'detailed' is provided, then a count
-      is provided for each category like 'build/class'.
-
-    repository=path
-      The top level directory of the repository, used to derive the header
-      guard CPP variable. By default, this is determined by searching for a
-      path that contains .git, .hg, or .svn. When this flag is specified, the
-      given path is used instead. This option allows the header guard CPP
-      variable to remain consistent even if members of a team have different
-      repository root directories (such as when checking out a subdirectory
-      with SVN). In addition, users of non-mainstream version control systems
-      can use this flag to ensure readable header guard CPP variables.
-
-      Examples:
-        Assuming that Alice checks out ProjectName and Bob checks out
-        ProjectName/trunk and trunk contains src/chrome/ui/browser.h, then
-        with no --repository flag, the header guard CPP variable will be:
-
-        Alice => TRUNK_SRC_CHROME_BROWSER_UI_BROWSER_H_
-        Bob   => SRC_CHROME_BROWSER_UI_BROWSER_H_
-
-        If Alice uses the --repository=trunk flag and Bob omits the flag or
-        uses --repository=. then the header guard CPP variable will be:
-
-        Alice => SRC_CHROME_BROWSER_UI_BROWSER_H_
-        Bob   => SRC_CHROME_BROWSER_UI_BROWSER_H_
-
-    root=subdir
-      The root directory used for deriving header guard CPP variables. This
-      directory is relative to the top level directory of the repository which
-      by default is determined by searching for a directory that contains .git,
-      .hg, or .svn but can also be controlled with the --repository flag. If
-      the specified directory does not exist, this flag is ignored.
-
-      Examples:
-        Assuming that src is the top level directory of the repository, the
-        header guard CPP variables for src/chrome/browser/ui/browser.h are:
-
-        No flag => CHROME_BROWSER_UI_BROWSER_H_
-        --root=chrome => BROWSER_UI_BROWSER_H_
-        --root=chrome/browser => UI_BROWSER_H_
-
-    linelength=digits
-      This is the allowed line length for the project. The default value is
-      80 characters.
-
-      Examples:
-        --linelength=120
-
-    recursive
-      Search for files to lint recursively. Each directory given in the list
-      of files to be linted is replaced by all files that descend from that
-      directory. Files with extensions not in the valid extensions list are
-      excluded.
-
-    exclude=path
-      Exclude the given path from the list of files to be linted. Relative
-      paths are evaluated relative to the current directory and shell globbing
-      is performed. This flag can be provided multiple times to exclude
-      multiple files.
-
-      Examples:
-        --exclude=one.cc
-        --exclude=src/*.cc
-        --exclude=src/*.cc --exclude=test/*.cc
-
-    extensions=extension,extension,...
-      The allowed file extensions that cpplint will check
-
-      Examples:
-        --extensions=%s
-
-    headers=extension,extension,...
-      The allowed header extensions that cpplint will consider to be header files
-      (by default, only files with extensions %s
-      will be assumed to be headers)
-
-      Examples:
-        --headers=%s
-
-    cpplint.py supports per-directory configurations specified in CPPLINT.cfg
-    files. CPPLINT.cfg file can contain a number of key=value pairs.
-    Currently the following options are supported:
-
-      set noparent
-      filter=+filter1,-filter2,...
-      exclude_files=regex
-      linelength=80
-      root=subdir
-
-    "set noparent" option prevents cpplint from traversing directory tree
-    upwards looking for more .cfg files in parent directories. This option
-    is usually placed in the top-level project directory.
-
-    The "filter" option is similar in function to --filter flag. It specifies
-    message filters in addition to the |_DEFAULT_FILTERS| and those specified
-    through --filter command-line flag.
-
-    "exclude_files" allows to specify a regular expression to be matched against
-    a file name. If the expression matches, the file is skipped and not run
-    through the linter.
-
-    "linelength" specifies the allowed line length for the project.
-
-    The "root" option is similar in function to the --root flag (see example
-    above).
-
-    CPPLINT.cfg has an effect on files in the same directory and all
-    subdirectories, unless overridden by a nested configuration file.
-
-      Example file:
-        filter=-build/include_order,+build/include_alpha
-        exclude_files=.*\\.cc
-
-    The above example disables build/include_order warning and enables
-    build/include_alpha as well as excludes all .cc from being
-    processed by linter, in the current directory (where the .cfg
-    file is located) and all subdirectories.
-""" % (list(GetAllExtensions()),
-       ','.join(list(GetAllExtensions())),
-       GetHeaderExtensions(),
-       ','.join(GetHeaderExtensions()))
-
-# We categorize each error message we print.  Here are the categories.
-# We want an explicit list so we can list them all in cpplint --filter=.
-# If you add a new error message with a new category, add it to the list
-# here!  cpplint_unittest.py should tell you if you forget to do this.
-_ERROR_CATEGORIES = [
-    'build/class',
-    'build/c++11',
-    'build/c++14',
-    'build/c++tr1',
-    'build/deprecated',
-    'build/endif_comment',
-    'build/explicit_make_pair',
-    'build/forward_decl',
-    'build/header_guard',
-    'build/include',
-    'build/include_subdir',
-    'build/include_alpha',
-    'build/include_order',
-    'build/include_what_you_use',
-    'build/namespaces_literals',
-    'build/namespaces',
-    'build/printf_format',
-    'build/storage_class',
-    'legal/copyright',
-    'readability/alt_tokens',
-    'readability/braces',
-    'readability/casting',
-    'readability/check',
-    'readability/constructors',
-    'readability/fn_size',
-    'readability/inheritance',
-    'readability/multiline_comment',
-    'readability/multiline_string',
-    'readability/namespace',
-    'readability/nolint',
-    'readability/nul',
-    'readability/strings',
-    'readability/todo',
-    'readability/utf8',
-    'runtime/arrays',
-    'runtime/casting',
-    'runtime/explicit',
-    'runtime/int',
-    'runtime/init',
-    'runtime/invalid_increment',
-    'runtime/member_string_references',
-    'runtime/memset',
-    'runtime/indentation_namespace',
-    'runtime/operator',
-    'runtime/printf',
-    'runtime/printf_format',
-    'runtime/references',
-    'runtime/string',
-    'runtime/threadsafe_fn',
-    'runtime/vlog',
-    'whitespace/blank_line',
-    'whitespace/braces',
-    'whitespace/comma',
-    'whitespace/comments',
-    'whitespace/empty_conditional_body',
-    'whitespace/empty_if_body',
-    'whitespace/empty_loop_body',
-    'whitespace/end_of_line',
-    'whitespace/ending_newline',
-    'whitespace/forcolon',
-    'whitespace/indent',
-    'whitespace/line_length',
-    'whitespace/newline',
-    'whitespace/operators',
-    'whitespace/parens',
-    'whitespace/semicolon',
-    'whitespace/tab',
-    'whitespace/todo',
-    ]
-
-# These error categories are no longer enforced by cpplint, but for backwards-
-# compatibility they may still appear in NOLINT comments.
-_LEGACY_ERROR_CATEGORIES = [
-    'readability/streams',
-    'readability/function',
-    ]
-
-# The default state of the category filter. This is overridden by the --filter=
-# flag. By default all errors are on, so only add here categories that should be
-# off by default (i.e., categories that must be enabled by the --filter= flags).
-# All entries here should start with a '-' or '+', as in the --filter= flag.
-_DEFAULT_FILTERS = ['-build/include_alpha']
-
-# The default list of categories suppressed for C (not C++) files.
-_DEFAULT_C_SUPPRESSED_CATEGORIES = [
-    'readability/casting',
-    ]
-
-# The default list of categories suppressed for Linux Kernel files.
-_DEFAULT_KERNEL_SUPPRESSED_CATEGORIES = [
-    'whitespace/tab',
-    ]
-
-# We used to check for high-bit characters, but after much discussion we
-# decided those were OK, as long as they were in UTF-8 and didn't represent
-# hard-coded international strings, which belong in a separate i18n file.
-
-# C++ headers
-_CPP_HEADERS = frozenset([
-    # Legacy
-    'algobase.h',
-    'algo.h',
-    'alloc.h',
-    'builtinbuf.h',
-    'bvector.h',
-    'complex.h',
-    'defalloc.h',
-    'deque.h',
-    'editbuf.h',
-    'fstream.h',
-    'function.h',
-    'hash_map',
-    'hash_map.h',
-    'hash_set',
-    'hash_set.h',
-    'hashtable.h',
-    'heap.h',
-    'indstream.h',
-    'iomanip.h',
-    'iostream.h',
-    'istream.h',
-    'iterator.h',
-    'list.h',
-    'map.h',
-    'multimap.h',
-    'multiset.h',
-    'ostream.h',
-    'pair.h',
-    'parsestream.h',
-    'pfstream.h',
-    'procbuf.h',
-    'pthread_alloc',
-    'pthread_alloc.h',
-    'rope',
-    'rope.h',
-    'ropeimpl.h',
-    'set.h',
-    'slist',
-    'slist.h',
-    'stack.h',
-    'stdiostream.h',
-    'stl_alloc.h',
-    'stl_relops.h',
-    'streambuf.h',
-    'stream.h',
-    'strfile.h',
-    'strstream.h',
-    'tempbuf.h',
-    'tree.h',
-    'type_traits.h',
-    'vector.h',
-    # 17.6.1.2 C++ library headers
-    'algorithm',
-    'array',
-    'atomic',
-    'bitset',
-    'chrono',
-    'codecvt',
-    'complex',
-    'condition_variable',
-    'deque',
-    'exception',
-    'forward_list',
-    'fstream',
-    'functional',
-    'future',
-    'initializer_list',
-    'iomanip',
-    'ios',
-    'iosfwd',
-    'iostream',
-    'istream',
-    'iterator',
-    'limits',
-    'list',
-    'locale',
-    'map',
-    'memory',
-    'mutex',
-    'new',
-    'numeric',
-    'ostream',
-    'queue',
-    'random',
-    'ratio',
-    'regex',
-    'scoped_allocator',
-    'set',
-    'sstream',
-    'stack',
-    'stdexcept',
-    'streambuf',
-    'string',
-    'strstream',
-    'system_error',
-    'thread',
-    'tuple',
-    'typeindex',
-    'typeinfo',
-    'type_traits',
-    'unordered_map',
-    'unordered_set',
-    'utility',
-    'valarray',
-    'vector',
-    # 17.6.1.2 C++ headers for C library facilities
-    'cassert',
-    'ccomplex',
-    'cctype',
-    'cerrno',
-    'cfenv',
-    'cfloat',
-    'cinttypes',
-    'ciso646',
-    'climits',
-    'clocale',
-    'cmath',
-    'csetjmp',
-    'csignal',
-    'cstdalign',
-    'cstdarg',
-    'cstdbool',
-    'cstddef',
-    'cstdint',
-    'cstdio',
-    'cstdlib',
-    'cstring',
-    'ctgmath',
-    'ctime',
-    'cuchar',
-    'cwchar',
-    'cwctype',
-    ])
-
-# Type names
-_TYPES = re.compile(
-    r'^(?:'
-    # [dcl.type.simple]
-    r'(char(16_t|32_t)?)|wchar_t|'
-    r'bool|short|int|long|signed|unsigned|float|double|'
-    # [support.types]
-    r'(ptrdiff_t|size_t|max_align_t|nullptr_t)|'
-    # [cstdint.syn]
-    r'(u?int(_fast|_least)?(8|16|32|64)_t)|'
-    r'(u?int(max|ptr)_t)|'
-    r')$')
-
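-# For example, _TYPES matches 'int', 'size_t' and 'uint64_t', but not
-# 'std::string' or other class types.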
-
-# These headers are excluded from [build/include] and [build/include_order]
-# checks:
-# - Anything not following google file name conventions (containing an
-#   uppercase character, such as Python.h or nsStringAPI.h, for example).
-# - Lua headers.
-_THIRD_PARTY_HEADERS_PATTERN = re.compile(
-    r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$')
-
-# Pattern for matching FileInfo.BaseName() against test file name
-_test_suffixes = ['_test', '_regtest', '_unittest']
-_TEST_FILE_SUFFIX = '(' + '|'.join(_test_suffixes) + r')$'
-
-# Pattern that matches only complete whitespace, possibly across multiple lines.
-_EMPTY_CONDITIONAL_BODY_PATTERN = re.compile(r'^\s*$', re.DOTALL)
-
-# Assertion macros.  These are defined in base/logging.h and
-# testing/base/public/gunit.h.
-_CHECK_MACROS = [
-    'DCHECK', 'CHECK',
-    'EXPECT_TRUE', 'ASSERT_TRUE',
-    'EXPECT_FALSE', 'ASSERT_FALSE',
-    ]
-
-# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
-_CHECK_REPLACEMENT = dict([(macro_var, {}) for macro_var in _CHECK_MACROS])
-
-for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
-                        ('>=', 'GE'), ('>', 'GT'),
-                        ('<=', 'LE'), ('<', 'LT')]:
-  _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
-  _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
-  _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
-  _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
-
-for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
-                            ('>=', 'LT'), ('>', 'LE'),
-                            ('<=', 'GT'), ('<', 'GE')]:
-  _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
-  _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
-
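-# A minimal sketch of how the tables above can be consumed (a hypothetical
-# helper, not used by the checks in this file): the suggested replacement
-# for "CHECK(a == b)" is looked up as _CHECK_REPLACEMENT['CHECK']['=='],
-# which yields 'CHECK_EQ'.
-def _SuggestCheckMacro(macro, op):
-  """Returns the dedicated comparison macro, e.g. 'CHECK_EQ', or None."""
-  return _CHECK_REPLACEMENT.get(macro, {}).get(op)
-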
-# Alternative tokens and their replacements.  For full list, see section 2.5
-# Alternative tokens [lex.digraph] in the C++ standard.
-#
-# Digraphs (such as '%:') are not included here since it's a mess to
-# match those on a word boundary.
-_ALT_TOKEN_REPLACEMENT = {
-    'and': '&&',
-    'bitor': '|',
-    'or': '||',
-    'xor': '^',
-    'compl': '~',
-    'bitand': '&',
-    'and_eq': '&=',
-    'or_eq': '|=',
-    'xor_eq': '^=',
-    'not': '!',
-    'not_eq': '!='
-    }
-
-# Compile regular expression that matches all the above keywords.  The "[ =()]"
-# bit is meant to avoid matching these keywords outside of boolean expressions.
-#
-# False positives include C-style multi-line comments and multi-line strings
-# but those have always been troublesome for cpplint.
-_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
-    r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
-
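-# Illustrative sketch (a hypothetical helper, not called anywhere in this
-# file) of rewriting alternative tokens with the pattern and table above,
-# e.g. 'if (a and not b)' -> 'if (a && ! b)':
-def _RewriteAltTokens(line):
-  return _ALT_TOKEN_REPLACEMENT_PATTERN.sub(
-      lambda match: match.group(0)[0] + _ALT_TOKEN_REPLACEMENT[match.group(1)],
-      line)
-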
-
-# These constants define types of headers for use with
-# _IncludeState.CheckNextIncludeOrder().
-_C_SYS_HEADER = 1
-_CPP_SYS_HEADER = 2
-_LIKELY_MY_HEADER = 3
-_POSSIBLE_MY_HEADER = 4
-_OTHER_HEADER = 5
-
-# These constants define the current inline assembly state
-_NO_ASM = 0       # Outside of inline assembly block
-_INSIDE_ASM = 1   # Inside inline assembly block
-_END_ASM = 2      # Last line of inline assembly block
-_BLOCK_ASM = 3    # The whole block is an inline assembly block
-
-# Match start of assembly blocks
-_MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
-                        r'(?:\s+(volatile|__volatile__))?'
-                        r'\s*[{(]')
-
-# Match strings that indicate we're working on a C (not C++) file.
-_SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|'
-                            r'vim?:\s*.*(\s*|:)filetype=c(\s*|:|$))')
-
-# Match string that indicates we're working on a Linux Kernel file.
-_SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)')
-
-_regexp_compile_cache = {}
-
-# {str, set(int)}: a map from error categories to sets of linenumbers
-# on which those errors are expected and should be suppressed.
-_error_suppressions = {}
-
-# The root directory used for deriving header guard CPP variable.
-# This is set by --root flag.
-_root = None
-
-# The top level repository directory. If set, _root is calculated relative to
-# this directory instead of the directory containing version control artifacts.
-# This is set by the --repository flag.
-_repository = None
-
-# Files to exclude from linting. This is set by the --exclude flag.
-_excludes = None
-
-# Whether to suppress PrintInfo messages
-_quiet = False
-
-# The allowed line length of files.
-# This is set by --linelength flag.
-_line_length = 80
-
-try:
-  xrange(1, 0)
-except NameError:
-  #  -- pylint: disable=redefined-builtin
-  xrange = range
-
-try:
-  unicode
-except NameError:
-  #  -- pylint: disable=redefined-builtin
-  basestring = unicode = str
-
-try:
-  long(2)
-except NameError:
-  #  -- pylint: disable=redefined-builtin
-  long = int
-
-if sys.version_info < (3,):
-  #  -- pylint: disable=no-member
-  # BINARY_TYPE = str
-  itervalues = dict.itervalues
-  iteritems = dict.iteritems
-else:
-  # BINARY_TYPE = bytes
-  itervalues = dict.values
-  iteritems = dict.items
-
-def unicode_escape_decode(x):
-  if sys.version_info < (3,):
-    return codecs.unicode_escape_decode(x)[0]
-  else:
-    return x
-
-# {str, bool}: a map from error categories to booleans which indicate if the
-# category should be suppressed for every line.
-_global_error_suppressions = {}
-
-
-def ParseNolintSuppressions(filename, raw_line, linenum, error):
-  """Updates the global list of line error-suppressions.
-
-  Parses any NOLINT comments on the current line, updating the global
-  error_suppressions store.  Reports an error if the NOLINT comment
-  was malformed.
-
-  Args:
-    filename: str, the name of the input file.
-    raw_line: str, the line of input text, with comments.
-    linenum: int, the number of the current line.
-    error: function, an error handler.
-  """
-  matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
-  if matched:
-    if matched.group(1):
-      suppressed_line = linenum + 1
-    else:
-      suppressed_line = linenum
-    category = matched.group(2)
-    if category in (None, '(*)'):  # => "suppress all"
-      _error_suppressions.setdefault(None, set()).add(suppressed_line)
-    else:
-      if category.startswith('(') and category.endswith(')'):
-        category = category[1:-1]
-        if category in _ERROR_CATEGORIES:
-          _error_suppressions.setdefault(category, set()).add(suppressed_line)
-        elif category not in _LEGACY_ERROR_CATEGORIES:
-          error(filename, linenum, 'readability/nolint', 5,
-                'Unknown NOLINT error category: %s' % category)
-
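-# Minimal usage sketch for the NOLINT machinery (toy inputs; the no-op
-# error handler stands in for the real one): a "// NOLINT(runtime/int)"
-# comment on line 12 suppresses that category on line 12 only.
-def _NolintExample():
-  ResetNolintSuppressions()
-  ParseNolintSuppressions('foo.cc', 'long x;  // NOLINT(runtime/int)', 12,
-                          lambda *args: None)
-  return IsErrorSuppressedByNolint('runtime/int', 12)  # True
-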
-
-def ProcessGlobalSuppresions(lines):
-  """Updates the list of global error suppressions.
-
-  Parses any lint directives in the file that have global effect.
-
-  Args:
-    lines: An array of strings, each representing a line of the file, with the
-           last element being empty if the file is terminated with a newline.
-  """
-  for line in lines:
-    if _SEARCH_C_FILE.search(line):
-      for category in _DEFAULT_C_SUPPRESSED_CATEGORIES:
-        _global_error_suppressions[category] = True
-    if _SEARCH_KERNEL_FILE.search(line):
-      for category in _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES:
-        _global_error_suppressions[category] = True
-
-
-def ResetNolintSuppressions():
-  """Resets the set of NOLINT suppressions to empty."""
-  _error_suppressions.clear()
-  _global_error_suppressions.clear()
-
-
-def IsErrorSuppressedByNolint(category, linenum):
-  """Returns true if the specified error category is suppressed on this line.
-
-  Consults the global error_suppressions map populated by
-  ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions.
-
-  Args:
-    category: str, the category of the error.
-    linenum: int, the current line number.
-  Returns:
-    bool, True iff the error should be suppressed due to a NOLINT comment or
-    global suppression.
-  """
-  return (_global_error_suppressions.get(category, False) or
-          linenum in _error_suppressions.get(category, set()) or
-          linenum in _error_suppressions.get(None, set()))
-
-
-def Match(pattern, s):
-  """Matches the string with the pattern, caching the compiled regexp."""
-  # The regexp compilation caching is inlined in both Match and Search for
-  # performance reasons; factoring it out into a separate function turns out
-  # to be noticeably expensive.
-  if pattern not in _regexp_compile_cache:
-    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
-  return _regexp_compile_cache[pattern].match(s)
-
-
-def ReplaceAll(pattern, rep, s):
-  """Replaces instances of pattern in a string with a replacement.
-
-  The compiled regex is kept in a cache shared by Match and Search.
-
-  Args:
-    pattern: regex pattern
-    rep: replacement text
-    s: search string
-
-  Returns:
-    string with replacements made (or original string if no replacements)
-  """
-  if pattern not in _regexp_compile_cache:
-    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
-  return _regexp_compile_cache[pattern].sub(rep, s)
-
-
-def Search(pattern, s):
-  """Searches the string for the pattern, caching the compiled regexp."""
-  if pattern not in _regexp_compile_cache:
-    _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
-  return _regexp_compile_cache[pattern].search(s)
-
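-# Behavior of the cached helpers above, on toy inputs:
-#   Match(r'\d+', 'abc123')          -> None (anchored at the start)
-#   Search(r'\d+', 'abc123')         -> matches '123' (scans anywhere)
-#   ReplaceAll(r'\s+', ' ', 'a   b') -> 'a b'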
-
-def _IsSourceExtension(s):
-  """File extension (excluding dot) matches a source file extension."""
-  return s in GetNonHeaderExtensions()
-
-
-class _IncludeState(object):
-  """Tracks line numbers for includes, and the order in which includes appear.
-
-  include_list contains list of lists of (header, line number) pairs.
-  It's a list of lists rather than just one flat list to make it
-  easier to update across preprocessor boundaries.
-
-  Call CheckNextIncludeOrder() once for each header in the file, passing
-  in the type constants defined above. Calls in an illegal order will
-  raise an _IncludeError with an appropriate error message.
-
-  """
-  # self._section will move monotonically through this set. If it ever
-  # needs to move backwards, CheckNextIncludeOrder will raise an error.
-  _INITIAL_SECTION = 0
-  _MY_H_SECTION = 1
-  _C_SECTION = 2
-  _CPP_SECTION = 3
-  _OTHER_H_SECTION = 4
-
-  _TYPE_NAMES = {
-      _C_SYS_HEADER: 'C system header',
-      _CPP_SYS_HEADER: 'C++ system header',
-      _LIKELY_MY_HEADER: 'header this file implements',
-      _POSSIBLE_MY_HEADER: 'header this file may implement',
-      _OTHER_HEADER: 'other header',
-      }
-  _SECTION_NAMES = {
-      _INITIAL_SECTION: "... nothing. (This can't be an error.)",
-      _MY_H_SECTION: 'a header this file implements',
-      _C_SECTION: 'C system header',
-      _CPP_SECTION: 'C++ system header',
-      _OTHER_H_SECTION: 'other header',
-      }
-
-  def __init__(self):
-    self.include_list = [[]]
-    self._section = None
-    self._last_header = None
-    self.ResetSection('')
-
-  def FindHeader(self, header):
-    """Check if a header has already been included.
-
-    Args:
-      header: header to check.
-    Returns:
-      Line number of previous occurrence, or -1 if the header has not
-      been seen before.
-    """
-    for section_list in self.include_list:
-      for f in section_list:
-        if f[0] == header:
-          return f[1]
-    return -1
-
-  def ResetSection(self, directive):
-    """Reset section checking for preprocessor directive.
-
-    Args:
-      directive: preprocessor directive (e.g. "if", "else").
-    """
-    # The name of the current section.
-    self._section = self._INITIAL_SECTION
-    # The path of last found header.
-    self._last_header = ''
-
-    # Update list of includes.  Note that we never pop from the
-    # include list.
-    if directive in ('if', 'ifdef', 'ifndef'):
-      self.include_list.append([])
-    elif directive in ('else', 'elif'):
-      self.include_list[-1] = []
-
-  def SetLastHeader(self, header_path):
-    self._last_header = header_path
-
-  def CanonicalizeAlphabeticalOrder(self, header_path):
-    """Returns a path canonicalized for alphabetical comparison.
-
-    - replaces "-" with "_" so they both cmp the same.
-    - removes '-inl' since we don't require them to be after the main header.
-    - lowercase everything, just in case.
-
-    Args:
-      header_path: Path to be canonicalized.
-
-    Returns:
-      Canonicalized path.
-    """
-    return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
-
-  def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path):
-    """Check if a header is in alphabetical order with the previous header.
-
-    Args:
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      header_path: Canonicalized header to be checked.
-
-    Returns:
-      True if the header is in alphabetical order.
-    """
-    # If previous section is different from current section, _last_header will
-    # be reset to empty string, so it's always less than current header.
-    #
-    # If previous line was a blank line, assume that the headers are
-    # intentionally sorted the way they are.
-    if (self._last_header > header_path and
-        Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
-      return False
-    return True
-
-  def CheckNextIncludeOrder(self, header_type):
-    """Returns a non-empty error message if the next header is out of order.
-
-    This function also updates the internal state to be ready to check
-    the next include.
-
-    Args:
-      header_type: One of the _XXX_HEADER constants defined above.
-
-    Returns:
-      The empty string if the header is in the right order, or an
-      error message describing what's wrong.
-
-    """
-    error_message = ('Found %s after %s' %
-                     (self._TYPE_NAMES[header_type],
-                      self._SECTION_NAMES[self._section]))
-
-    last_section = self._section
-
-    if header_type == _C_SYS_HEADER:
-      if self._section <= self._C_SECTION:
-        self._section = self._C_SECTION
-      else:
-        self._last_header = ''
-        return error_message
-    elif header_type == _CPP_SYS_HEADER:
-      if self._section <= self._CPP_SECTION:
-        self._section = self._CPP_SECTION
-      else:
-        self._last_header = ''
-        return error_message
-    elif header_type == _LIKELY_MY_HEADER:
-      if self._section <= self._MY_H_SECTION:
-        self._section = self._MY_H_SECTION
-      else:
-        self._section = self._OTHER_H_SECTION
-    elif header_type == _POSSIBLE_MY_HEADER:
-      if self._section <= self._MY_H_SECTION:
-        self._section = self._MY_H_SECTION
-      else:
-        # This will always be the fallback because we're not sure
-        # enough that the header is associated with this file.
-        self._section = self._OTHER_H_SECTION
-    else:
-      assert header_type == _OTHER_HEADER
-      self._section = self._OTHER_H_SECTION
-
-    if last_section != self._section:
-      self._last_header = ''
-
-    return ''
-
-
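-# Minimal usage sketch for _IncludeState (a toy driver, not used by cpplint
-# itself): headers fed in the canonical order produce empty strings, while
-# a C system header arriving after a C++ one yields an error message.
-def _IncludeOrderExample():
-  state = _IncludeState()
-  assert state.CheckNextIncludeOrder(_C_SYS_HEADER) == ''
-  assert state.CheckNextIncludeOrder(_CPP_SYS_HEADER) == ''
-  # Non-empty: 'Found C system header after C++ system header'
-  return state.CheckNextIncludeOrder(_C_SYS_HEADER)
-
-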
-class _CppLintState(object):
-  """Maintains module-wide state.."""
-
-  def __init__(self):
-    self.verbose_level = 1  # global setting.
-    self.error_count = 0    # global count of reported errors
-    # filters to apply when emitting error messages
-    self.filters = _DEFAULT_FILTERS[:]
-    # backup of filter list. Used to restore the state after each file.
-    self._filters_backup = self.filters[:]
-    self.counting = 'total'  # In what way are we counting errors?
-    self.errors_by_category = {}  # string to int dict storing error counts
-
-    # output format:
-    # "emacs" - format that emacs can parse (default)
-    # "eclipse" - format that eclipse can parse
-    # "vs7" - format that Microsoft Visual Studio 7 can parse
-    # "junit" - format that Jenkins, Bamboo, etc can parse
-    self.output_format = 'emacs'
-
-    # For JUnit output, save errors and failures until the end so that they
-    # can be written into the XML
-    self._junit_errors = []
-    self._junit_failures = []
-
-  def SetOutputFormat(self, output_format):
-    """Sets the output format for errors."""
-    self.output_format = output_format
-
-  def SetVerboseLevel(self, level):
-    """Sets the module's verbosity, and returns the previous setting."""
-    last_verbose_level = self.verbose_level
-    self.verbose_level = level
-    return last_verbose_level
-
-  def SetCountingStyle(self, counting_style):
-    """Sets the module's counting options."""
-    self.counting = counting_style
-
-  def SetFilters(self, filters):
-    """Sets the error-message filters.
-
-    These filters are applied when deciding whether to emit a given
-    error message.
-
-    Args:
-      filters: A string of comma-separated filters (e.g. "+whitespace/indent").
-               Each filter should start with + or -; else we die.
-
-    Raises:
-      ValueError: The comma-separated filters did not all start with '+' or '-'.
-                  E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
-    """
-    # Default filters always have less priority than the flag ones.
-    self.filters = _DEFAULT_FILTERS[:]
-    self.AddFilters(filters)
-
-  def AddFilters(self, filters):
-    """ Adds more filters to the existing list of error-message filters. """
-    for filt in filters.split(','):
-      clean_filt = filt.strip()
-      if clean_filt:
-        self.filters.append(clean_filt)
-    for filt in self.filters:
-      if not (filt.startswith('+') or filt.startswith('-')):
-        raise ValueError('Every filter in --filters must start with + or -'
-                         ' (%s does not)' % filt)
-
-  def BackupFilters(self):
-    """ Saves the current filter list to backup storage."""
-    self._filters_backup = self.filters[:]
-
-  def RestoreFilters(self):
-    """ Restores filters previously backed up."""
-    self.filters = self._filters_backup[:]
-
-  def ResetErrorCounts(self):
-    """Sets the module's error statistic back to zero."""
-    self.error_count = 0
-    self.errors_by_category = {}
-
-  def IncrementErrorCount(self, category):
-    """Bumps the module's error statistic."""
-    self.error_count += 1
-    if self.counting in ('toplevel', 'detailed'):
-      if self.counting != 'detailed':
-        category = category.split('/')[0]
-      if category not in self.errors_by_category:
-        self.errors_by_category[category] = 0
-      self.errors_by_category[category] += 1
-
-  def PrintErrorCounts(self):
-    """Print a summary of errors by category, and the total."""
-    for category, count in sorted(iteritems(self.errors_by_category)):
-      self.PrintInfo('Category \'%s\' errors found: %d\n' %
-                       (category, count))
-    if self.error_count > 0:
-      self.PrintInfo('Total errors found: %d\n' % self.error_count)
-
-  def PrintInfo(self, message):
-    if not _quiet and self.output_format != 'junit':
-      sys.stderr.write(message)
-
-  def PrintError(self, message):
-    if self.output_format == 'junit':
-      self._junit_errors.append(message)
-    else:
-      sys.stderr.write(message)
-
-  def AddJUnitFailure(self, filename, linenum, message, category, confidence):
-    self._junit_failures.append((filename, linenum, message, category,
-        confidence))
-
-  def FormatJUnitXML(self):
-    num_errors = len(self._junit_errors)
-    num_failures = len(self._junit_failures)
-
-    testsuite = xml.etree.ElementTree.Element('testsuite')
-    testsuite.attrib['name'] = 'cpplint'
-    testsuite.attrib['errors'] = str(num_errors)
-    testsuite.attrib['failures'] = str(num_failures)
-
-    if num_errors == 0 and num_failures == 0:
-      testsuite.attrib['tests'] = str(1)
-      xml.etree.ElementTree.SubElement(testsuite, 'testcase', name='passed')
-
-    else:
-      testsuite.attrib['tests'] = str(num_errors + num_failures)
-      if num_errors > 0:
-        testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase')
-        testcase.attrib['name'] = 'errors'
-        error = xml.etree.ElementTree.SubElement(testcase, 'error')
-        error.text = '\n'.join(self._junit_errors)
-      if num_failures > 0:
-        # Group failures by file
-        failed_file_order = []
-        failures_by_file = {}
-        for failure in self._junit_failures:
-          failed_file = failure[0]
-          if failed_file not in failed_file_order:
-            failed_file_order.append(failed_file)
-            failures_by_file[failed_file] = []
-          failures_by_file[failed_file].append(failure)
-        # Create a testcase for each file
-        for failed_file in failed_file_order:
-          failures = failures_by_file[failed_file]
-          testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase')
-          testcase.attrib['name'] = failed_file
-          failure = xml.etree.ElementTree.SubElement(testcase, 'failure')
-          template = '{0}: {1} [{2}] [{3}]'
-          texts = [template.format(f[1], f[2], f[3], f[4]) for f in failures]
-          failure.text = '\n'.join(texts)
-
-    xml_decl = '<?xml version="1.0" encoding="UTF-8" ?>\n'
-    return xml_decl + xml.etree.ElementTree.tostring(testsuite, 'utf-8').decode('utf-8')
-
-
-_cpplint_state = _CppLintState()
-
-
-def _OutputFormat():
-  """Gets the module's output format."""
-  return _cpplint_state.output_format
-
-
-def _SetOutputFormat(output_format):
-  """Sets the module's output format."""
-  _cpplint_state.SetOutputFormat(output_format)
-
-
-def _VerboseLevel():
-  """Returns the module's verbosity setting."""
-  return _cpplint_state.verbose_level
-
-
-def _SetVerboseLevel(level):
-  """Sets the module's verbosity, and returns the previous setting."""
-  return _cpplint_state.SetVerboseLevel(level)
-
-
-def _SetCountingStyle(level):
-  """Sets the module's counting options."""
-  _cpplint_state.SetCountingStyle(level)
-
-
-def _Filters():
-  """Returns the module's list of output filters, as a list."""
-  return _cpplint_state.filters
-
-
-def _SetFilters(filters):
-  """Sets the module's error-message filters.
-
-  These filters are applied when deciding whether to emit a given
-  error message.
-
-  Args:
-    filters: A string of comma-separated filters (e.g. "whitespace/indent").
-             Each filter should start with + or -; else we die.
-  """
-  _cpplint_state.SetFilters(filters)
-
-def _AddFilters(filters):
-  """Adds more filter overrides.
-
-  Unlike _SetFilters, this function does not reset the current list of filters
-  available.
-
-  Args:
-    filters: A string of comma-separated filters (e.g. "whitespace/indent").
-             Each filter should start with + or -; else we die.
-  """
-  _cpplint_state.AddFilters(filters)
-
-def _BackupFilters():
-  """ Saves the current filter list to backup storage."""
-  _cpplint_state.BackupFilters()
-
-def _RestoreFilters():
-  """ Restores filters previously backed up."""
-  _cpplint_state.RestoreFilters()
-
-class _FunctionState(object):
-  """Tracks current function name and the number of lines in its body."""
-
-  _NORMAL_TRIGGER = 250  # for --v=0, 500 for --v=1, etc.
-  _TEST_TRIGGER = 400    # about 60% more than _NORMAL_TRIGGER.
-
-  def __init__(self):
-    self.in_a_function = False
-    self.lines_in_function = 0
-    self.current_function = ''
-
-  def Begin(self, function_name):
-    """Start analyzing function body.
-
-    Args:
-      function_name: The name of the function being tracked.
-    """
-    self.in_a_function = True
-    self.lines_in_function = 0
-    self.current_function = function_name
-
-  def Count(self):
-    """Count line in current function body."""
-    if self.in_a_function:
-      self.lines_in_function += 1
-
-  def Check(self, error, filename, linenum):
-    """Report if too many lines in function body.
-
-    Args:
-      error: The function to call with any errors found.
-      filename: The name of the current file.
-      linenum: The number of the line to check.
-    """
-    if not self.in_a_function:
-      return
-
-    if Match(r'T(EST|est)', self.current_function):
-      base_trigger = self._TEST_TRIGGER
-    else:
-      base_trigger = self._NORMAL_TRIGGER
-    trigger = base_trigger * 2**_VerboseLevel()
-
-    if self.lines_in_function > trigger:
-      error_level = int(math.log(self.lines_in_function / base_trigger, 2))
-      # With the normal 250-line base: 500 => 1, 1000 => 2, 2000 => 3,
-      # 4000 => 4, 8000 => 5, ...
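-      # e.g. at --v=0, a 1200-line function maps to int(log2(1200/250)) = 2.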
-      if error_level > 5:
-        error_level = 5
-      error(filename, linenum, 'readability/fn_size', error_level,
-            'Small and focused functions are preferred:'
-            ' %s has %d non-comment lines'
-            ' (error triggered by exceeding %d lines).'  % (
-                self.current_function, self.lines_in_function, trigger))
-
-  def End(self):
-    """Stop analyzing function body."""
-    self.in_a_function = False
-
-
-class _IncludeError(Exception):
-  """Indicates a problem with the include order in a file."""
-  pass
-
-
-class FileInfo(object):
-  """Provides utility functions for filenames.
-
-  FileInfo provides easy access to the components of a file's path
-  relative to the project root.
-  """
-
-  def __init__(self, filename):
-    self._filename = filename
-
-  def FullName(self):
-    """Make Windows paths like Unix."""
-    return os.path.abspath(self._filename).replace('\\', '/')
-
-  def RepositoryName(self):
-    r"""FullName after removing the local path to the repository.
-
-    If we have a real absolute path name here, we can try to do something
-    smart: detect the root of the checkout and truncate /path/to/checkout
-    from the name, so that header guards don't include things like
-    "C:\Documents and Settings\..." or "/home/username/...".  That way,
-    people who have checked the source out to different locations won't
-    see bogus errors.
-    """
-    fullname = self.FullName()
-
-    if os.path.exists(fullname):
-      project_dir = os.path.dirname(fullname)
-
-      # If the user specified a repository path, it exists, and the file is
-      # contained in it, use the specified repository path
-      if _repository:
-        repo = FileInfo(_repository).FullName()
-        root_dir = project_dir
-        while os.path.exists(root_dir):
-          # allow case insensitive compare on Windows
-          if os.path.normcase(root_dir) == os.path.normcase(repo):
-            return os.path.relpath(fullname, root_dir).replace('\\', '/')
-          one_up_dir = os.path.dirname(root_dir)
-          if one_up_dir == root_dir:
-            break
-          root_dir = one_up_dir
-
-      if os.path.exists(os.path.join(project_dir, ".svn")):
-        # If there's a .svn file in the current directory, we recursively look
-        # up the directory tree for the top of the SVN checkout
-        root_dir = project_dir
-        one_up_dir = os.path.dirname(root_dir)
-        while os.path.exists(os.path.join(one_up_dir, ".svn")):
-          root_dir = os.path.dirname(root_dir)
-          one_up_dir = os.path.dirname(one_up_dir)
-
-        prefix = os.path.commonprefix([root_dir, project_dir])
-        return fullname[len(prefix) + 1:]
-
-      # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
-      # searching up from the current path.
-      root_dir = current_dir = os.path.dirname(fullname)
-      while current_dir != os.path.dirname(current_dir):
-        if (os.path.exists(os.path.join(current_dir, ".git")) or
-            os.path.exists(os.path.join(current_dir, ".hg")) or
-            os.path.exists(os.path.join(current_dir, ".svn"))):
-          root_dir = current_dir
-        current_dir = os.path.dirname(current_dir)
-
-      if (os.path.exists(os.path.join(root_dir, ".git")) or
-          os.path.exists(os.path.join(root_dir, ".hg")) or
-          os.path.exists(os.path.join(root_dir, ".svn"))):
-        prefix = os.path.commonprefix([root_dir, project_dir])
-        return fullname[len(prefix) + 1:]
-
-    # Don't know what to do; header guard warnings may be wrong...
-    return fullname
-
-  def Split(self):
-    """Splits the file into the directory, basename, and extension.
-
-    For 'chrome/browser/browser.cc', Split() would
-    return ('chrome/browser', 'browser', '.cc')
-
-    Returns:
-      A tuple of (directory, basename, extension).
-    """
-
-    googlename = self.RepositoryName()
-    project, rest = os.path.split(googlename)
-    return (project,) + os.path.splitext(rest)
-
-  def BaseName(self):
-    """File base name - text after the final slash, before the final period."""
-    return self.Split()[1]
-
-  def Extension(self):
-    """File extension - text following the final period, includes that period."""
-    return self.Split()[2]
-
-  def NoExtension(self):
-    """File has no source file extension."""
-    return '/'.join(self.Split()[0:2])
-
-  def IsSource(self):
-    """File has a source file extension."""
-    return _IsSourceExtension(self.Extension()[1:])
-
-
-def _ShouldPrintError(category, confidence, linenum):
-  """If confidence >= verbose, category passes filter and is not suppressed."""
-
-  # There are three ways we might decide not to print an error message:
-  # a "NOLINT(category)" comment appears in the source,
-  # the verbosity level isn't high enough, or the filters filter it out.
-  if IsErrorSuppressedByNolint(category, linenum):
-    return False
-
-  if confidence < _cpplint_state.verbose_level:
-    return False
-
-  is_filtered = False
-  for one_filter in _Filters():
-    if one_filter.startswith('-'):
-      if category.startswith(one_filter[1:]):
-        is_filtered = True
-    elif one_filter.startswith('+'):
-      if category.startswith(one_filter[1:]):
-        is_filtered = False
-    else:
-      assert False  # should have been checked for in SetFilter.
-  if is_filtered:
-    return False
-
-  return True
-
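-# Filter semantics, on toy values: with _Filters() returning
-# ['-whitespace', '+whitespace/braces'], category 'whitespace/indent' is
-# filtered out while 'whitespace/braces' survives, because later entries
-# override earlier ones.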
-
-def Error(filename, linenum, category, confidence, message):
-  """Logs the fact we've found a lint error.
-
-  We log where the error was found, and also our confidence in the error,
-  that is, how certain we are this is a legitimate style regression, and
-  not a misidentification or a use that's sometimes justified.
-
-  False positives can be suppressed by the use of
-  "cpplint(category)"  comments on the offending line.  These are
-  parsed into _error_suppressions.
-
-  Args:
-    filename: The name of the file containing the error.
-    linenum: The number of the line containing the error.
-    category: A string used to describe the "category" this bug
-      falls under: "whitespace", say, or "runtime".  Categories
-      may have a hierarchy separated by slashes: "whitespace/indent".
-    confidence: A number from 1-5 representing a confidence score for
-      the error, with 5 meaning that we are certain of the problem,
-      and 1 meaning that it could be a legitimate construct.
-    message: The error message.
-  """
-  if _ShouldPrintError(category, confidence, linenum):
-    _cpplint_state.IncrementErrorCount(category)
-    if _cpplint_state.output_format == 'vs7':
-      _cpplint_state.PrintError('%s(%s): warning: %s  [%s] [%d]\n' % (
-          filename, linenum, message, category, confidence))
-    elif _cpplint_state.output_format == 'eclipse':
-      sys.stderr.write('%s:%s: warning: %s  [%s] [%d]\n' % (
-          filename, linenum, message, category, confidence))
-    elif _cpplint_state.output_format == 'junit':
-        _cpplint_state.AddJUnitFailure(filename, linenum, message, category,
-            confidence)
-    else:
-      final_message = '%s:%s:  %s  [%s] [%d]\n' % (
-          filename, linenum, message, category, confidence)
-      sys.stderr.write(final_message)
-
-# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
-_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
-    r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
-# Match a single C style comment on the same line.
-_RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/'
-# Matches multi-line C style comments.
-# This RE is a little bit more complicated than one might expect, because we
-# have to take care of removing spaces so we can handle comments inside
-# statements better.
-# The current rule is: we only clear spaces from both sides when we're at the
-# end of the line.  Otherwise, we try to remove spaces from the right side;
-# if this doesn't work, we try the left side, but only if there's a
-# non-word character on the right.
-_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
-    r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' +
-    _RE_PATTERN_C_COMMENTS + r'\s+|' +
-    r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
-    _RE_PATTERN_C_COMMENTS + r')')
-
-
-def IsCppString(line):
-  """Does line terminate so, that the next symbol is in string constant.
-
-  This function does not consider single-line nor multi-line comments.
-
-  Args:
-    line: is a partial line of code starting from the 0..n.
-
-  Returns:
-    True, if next character appended to 'line' is inside a
-    string constant.
-  """
-
-  line = line.replace(r'\\', 'XX')  # after this, \\" does not match to \"
-  return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
-
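-# e.g. IsCppString('x = "abc')  -> True  (the next char would be in a string)
-#      IsCppString('x = "abc"') -> False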
-
-def CleanseRawStrings(raw_lines):
-  """Removes C++11 raw strings from lines.
-
-    Before:
-      static const char kData[] = R"(
-          multi-line string
-          )";
-
-    After:
-      static const char kData[] = ""
-          (replaced by blank line)
-          "";
-
-  Args:
-    raw_lines: list of raw lines.
-
-  Returns:
-    list of lines with C++11 raw strings replaced by empty strings.
-  """
-
-  delimiter = None
-  lines_without_raw_strings = []
-  for line in raw_lines:
-    if delimiter:
-      # Inside a raw string, look for the end
-      end = line.find(delimiter)
-      if end >= 0:
-        # Found the end of the string, match leading space for this
-        # line and resume copying the original lines, and also insert
-        # a "" on the last line.
-        leading_space = Match(r'^(\s*)\S', line)
-        line = leading_space.group(1) + '""' + line[end + len(delimiter):]
-        delimiter = None
-      else:
-        # Haven't found the end yet, append a blank line.
-        line = '""'
-
-    # Look for beginning of a raw string, and replace them with
-    # empty strings.  This is done in a loop to handle multiple raw
-    # strings on the same line.
-    while delimiter is None:
-      # Look for beginning of a raw string.
-      # See 2.14.15 [lex.string] for syntax.
-      #
-      # Once we have matched a raw string, we check the prefix of the
-      # line to make sure that the line is not part of a single line
-      # comment.  It's done this way because we remove raw strings
-      # before removing comments as opposed to removing comments
-      # before removing raw strings.  This is because there are some
-      # cpplint checks that require the comments to be preserved, but
-      # we don't want to check comments that are inside raw strings.
-      matched = Match(r'^(.*?)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
-      if (matched and
-          not Match(r'^([^\'"]|\'(\\.|[^\'])*\'|"(\\.|[^"])*")*//',
-                    matched.group(1))):
-        delimiter = ')' + matched.group(2) + '"'
-
-        end = matched.group(3).find(delimiter)
-        if end >= 0:
-          # Raw string ended on same line
-          line = (matched.group(1) + '""' +
-                  matched.group(3)[end + len(delimiter):])
-          delimiter = None
-        else:
-          # Start of a multi-line raw string
-          line = matched.group(1) + '""'
-      else:
-        break
-
-    lines_without_raw_strings.append(line)
-
-  # TODO(unknown): if delimiter is not None here, we might want to
-  # emit a warning for unterminated string.
-  return lines_without_raw_strings
-
-
-def FindNextMultiLineCommentStart(lines, lineix):
-  """Find the beginning marker for a multiline comment."""
-  while lineix < len(lines):
-    if lines[lineix].strip().startswith('/*'):
-      # Only return this marker if the comment goes beyond this line
-      if lines[lineix].strip().find('*/', 2) < 0:
-        return lineix
-    lineix += 1
-  return len(lines)
-
-
-def FindNextMultiLineCommentEnd(lines, lineix):
-  """We are inside a comment, find the end marker."""
-  while lineix < len(lines):
-    if lines[lineix].strip().endswith('*/'):
-      return lineix
-    lineix += 1
-  return len(lines)
-
-
-def RemoveMultiLineCommentsFromRange(lines, begin, end):
-  """Clears a range of lines for multi-line comments."""
-  # Having /**/ dummy comments makes the lines non-empty, so we will not get
-  # unnecessary blank line warnings later in the code.
-  for i in range(begin, end):
-    lines[i] = '/**/'
-
-
-def RemoveMultiLineComments(filename, lines, error):
-  """Removes multiline (c-style) comments from lines."""
-  lineix = 0
-  while lineix < len(lines):
-    lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
-    if lineix_begin >= len(lines):
-      return
-    lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
-    if lineix_end >= len(lines):
-      error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
-            'Could not find end of multi-line comment')
-      return
-    RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
-    lineix = lineix_end + 1
-
-
-def CleanseComments(line):
-  """Removes //-comments and single-line C-style /* */ comments.
-
-  Args:
-    line: A line of C++ source.
-
-  Returns:
-    The line with single-line comments removed.
-  """
-  commentpos = line.find('//')
-  if commentpos != -1 and not IsCppString(line[:commentpos]):
-    line = line[:commentpos].rstrip()
-  # get rid of /* ... */
-  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
-
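-# e.g. CleanseComments('a = b;  // trailing') -> 'a = b;'
-#      CleanseComments('f(/* arg */ x)')      -> 'f(x)'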
-
-class CleansedLines(object):
-  """Holds 4 copies of all lines with different preprocessing applied to them.
-
-  1) elided member contains lines without strings and comments.
-  2) lines member contains lines without comments.
-  3) raw_lines member contains all the lines without processing.
-  4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw
-     strings removed.
-  All these members are of <type 'list'>, and of the same length.
-  """
-
-  def __init__(self, lines):
-    self.elided = []
-    self.lines = []
-    self.raw_lines = lines
-    self.num_lines = len(lines)
-    self.lines_without_raw_strings = CleanseRawStrings(lines)
-    for linenum in range(len(self.lines_without_raw_strings)):
-      self.lines.append(CleanseComments(
-          self.lines_without_raw_strings[linenum]))
-      elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
-      self.elided.append(CleanseComments(elided))
-
-  def NumLines(self):
-    """Returns the number of lines represented."""
-    return self.num_lines
-
-  @staticmethod
-  def _CollapseStrings(elided):
-    """Collapses strings and chars on a line to simple "" or '' blocks.
-
-    We nix strings first so we're not fooled by text like '"http://"'
-
-    Args:
-      elided: The line being processed.
-
-    Returns:
-      The line with collapsed strings.
-    """
-    if _RE_PATTERN_INCLUDE.match(elided):
-      return elided
-
-    # Remove escaped characters first to make quote/single quote collapsing
-    # basic.  Things that look like escaped characters shouldn't occur
-    # outside of strings and chars.
-    elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
-
-    # Replace quoted strings and digit separators.  Both single quotes
-    # and double quotes are processed in the same loop, otherwise
-    # nested quotes wouldn't work.
-    collapsed = ''
-    while True:
-      # Find the first quote character
-      match = Match(r'^([^\'"]*)([\'"])(.*)$', elided)
-      if not match:
-        collapsed += elided
-        break
-      head, quote, tail = match.groups()
-
-      if quote == '"':
-        # Collapse double quoted strings
-        second_quote = tail.find('"')
-        if second_quote >= 0:
-          collapsed += head + '""'
-          elided = tail[second_quote + 1:]
-        else:
-          # Unmatched double quote, don't bother processing the rest
-          # of the line since this is probably a multiline string.
-          collapsed += elided
-          break
-      else:
-        # Found single quote, check nearby text to eliminate digit separators.
-        #
-        # There is no special handling for floating point here, because
-        # the integer/fractional/exponent parts would all be parsed
-        # correctly as long as there are digits on both sides of the
-        # separator.  So we are fine as long as we don't see something
-        # like "0.'3" (gcc 4.9.0 will not allow this literal).
-        if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
-          match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
-          collapsed += head + match_literal.group(1).replace("'", '')
-          elided = match_literal.group(2)
-        else:
-          second_quote = tail.find('\'')
-          if second_quote >= 0:
-            collapsed += head + "''"
-            elided = tail[second_quote + 1:]
-          else:
-            # Unmatched single quote
-            collapsed += elided
-            break
-
-    return collapsed
-
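-# _CollapseStrings examples (toy lines):
-#   'printf("%d", 42);'    -> 'printf("", 42);'
-#   "x = 'a' + 1'000'000"  -> "x = '' + 1000000"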
-
-def FindEndOfExpressionInLine(line, startpos, stack):
-  """Find the position just after the end of current parenthesized expression.
-
-  Args:
-    line: a CleansedLines line.
-    startpos: start searching at this position.
-    stack: nesting stack at startpos.
-
-  Returns:
-    On finding matching end: (index just after matching end, None)
-    On finding an unclosed expression: (-1, None)
-    Otherwise: (-1, new stack at end of this line)
-  """
-  for i in xrange(startpos, len(line)):
-    char = line[i]
-    if char in '([{':
-      # Found start of parenthesized expression, push to expression stack
-      stack.append(char)
-    elif char == '<':
-      # Found potential start of template argument list
-      if i > 0 and line[i - 1] == '<':
-        # Left shift operator
-        if stack and stack[-1] == '<':
-          stack.pop()
-          if not stack:
-            return (-1, None)
-      elif i > 0 and Search(r'\boperator\s*$', line[0:i]):
-        # operator<, don't add to stack
-        continue
-      else:
-        # Tentative start of template argument list
-        stack.append('<')
-    elif char in ')]}':
-      # Found end of parenthesized expression.
-      #
-      # If we are currently expecting a matching '>', the pending '<'
-      # must have been an operator.  Remove them from expression stack.
-      while stack and stack[-1] == '<':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-      if ((stack[-1] == '(' and char == ')') or
-          (stack[-1] == '[' and char == ']') or
-          (stack[-1] == '{' and char == '}')):
-        stack.pop()
-        if not stack:
-          return (i + 1, None)
-      else:
-        # Mismatched parentheses
-        return (-1, None)
-    elif char == '>':
-      # Found potential end of template argument list.
-
-      # Ignore "->" and operator functions
-      if (i > 0 and
-          (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))):
-        continue
-
-      # Pop the stack if there is a matching '<'.  Otherwise, ignore
-      # this '>' since it must be an operator.
-      if stack:
-        if stack[-1] == '<':
-          stack.pop()
-          if not stack:
-            return (i + 1, None)
-    elif char == ';':
-      # Found something that looks like the end of a statement.  If we are
-      # currently expecting a '>', the matching '<' must have been an
-      # operator, since template argument lists should not contain statements.
-      while stack and stack[-1] == '<':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-
-  # Did not find end of expression or unbalanced parentheses on this line
-  return (-1, stack)
-
-
-def CloseExpression(clean_lines, linenum, pos):
-  """If input points to ( or { or [ or <, finds the position that closes it.
-
-  If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
-  linenum/pos that correspond to the closing of the expression.
-
-  TODO(unknown): cpplint spends a fair bit of time matching parentheses.
-  Ideally we would want to index all opening and closing parentheses once
-  and have CloseExpression be just a simple lookup, but due to preprocessor
-  tricks, this is not so easy.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    pos: A position on the line.
-
-  Returns:
-    A tuple (line, linenum, pos) pointer *past* the closing brace, or
-    (line, len(lines), -1) if we never find a close.  Note we ignore
-    strings and comments when matching; and the line we return is the
-    'cleansed' line at linenum.
-  """
-
-  line = clean_lines.elided[linenum]
-  if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]):
-    return (line, clean_lines.NumLines(), -1)
-
-  # Check first line
-  (end_pos, stack) = FindEndOfExpressionInLine(line, pos, [])
-  if end_pos > -1:
-    return (line, linenum, end_pos)
-
-  # Continue scanning forward
-  while stack and linenum < clean_lines.NumLines() - 1:
-    linenum += 1
-    line = clean_lines.elided[linenum]
-    (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack)
-    if end_pos > -1:
-      return (line, linenum, end_pos)
-
-  # Did not find end of expression before end of file, give up
-  return (line, clean_lines.NumLines(), -1)
-
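-# Usage sketch (a toy input and a hypothetical helper; relies on
-# definitions appearing later in this file): starting at the '(' that
-# follows 'f', CloseExpression returns the position just past its match.
-def _CloseExpressionExample():
-  lines = CleansedLines(['f(a, b(c), d);'])
-  _, linenum, pos = CloseExpression(lines, 0, 1)
-  return (linenum, pos)  # (0, 13): pos points at the ';'
-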
-
-def FindStartOfExpressionInLine(line, endpos, stack):
-  """Find position at the matching start of current expression.
-
-  This is almost the reverse of FindEndOfExpressionInLine, but note
-  that the input position and returned position differ by 1.
-
-  Args:
-    line: a CleansedLines line.
-    endpos: start searching at this position.
-    stack: nesting stack at endpos.
-
-  Returns:
-    On finding matching start: (index at matching start, None)
-    On finding an unclosed expression: (-1, None)
-    Otherwise: (-1, new stack at beginning of this line)
-  """
-  i = endpos
-  while i >= 0:
-    char = line[i]
-    if char in ')]}':
-      # Found end of expression, push to expression stack
-      stack.append(char)
-    elif char == '>':
-      # Found potential end of template argument list.
-      #
-      # Ignore it if it's a "->" or ">=" or "operator>"
-      if (i > 0 and
-          (line[i - 1] == '-' or
-           Match(r'\s>=\s', line[i - 1:]) or
-           Search(r'\boperator\s*$', line[0:i]))):
-        i -= 1
-      else:
-        stack.append('>')
-    elif char == '<':
-      # Found potential start of template argument list
-      if i > 0 and line[i - 1] == '<':
-        # Left shift operator
-        i -= 1
-      else:
-        # If there is a matching '>', we can pop the expression stack.
-        # Otherwise, ignore this '<' since it must be an operator.
-        if stack and stack[-1] == '>':
-          stack.pop()
-          if not stack:
-            return (i, None)
-    elif char in '([{':
-      # Found start of expression.
-      #
-      # If there are any unmatched '>' on the stack, they must be
-      # operators.  Remove those.
-      while stack and stack[-1] == '>':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-      if ((char == '(' and stack[-1] == ')') or
-          (char == '[' and stack[-1] == ']') or
-          (char == '{' and stack[-1] == '}')):
-        stack.pop()
-        if not stack:
-          return (i, None)
-      else:
-        # Mismatched parentheses
-        return (-1, None)
-    elif char == ';':
-      # Found something that looks like the end of a statement.  If we are
-      # currently expecting a '<', the matching '>' must have been an
-      # operator, since template argument lists should not contain statements.
-      while stack and stack[-1] == '>':
-        stack.pop()
-      if not stack:
-        return (-1, None)
-
-    i -= 1
-
-  return (-1, stack)
-
-
-def ReverseCloseExpression(clean_lines, linenum, pos):
-  """If input points to ) or } or ] or >, finds the position that opens it.
-
-  If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the
-  linenum/pos that correspond to the opening of the expression.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    pos: A position on the line.
-
-  Returns:
-    A tuple (line, linenum, pos) pointer *at* the opening brace, or
-    (line, 0, -1) if we never find the matching opening brace.  Note
-    we ignore strings and comments when matching; and the line we
-    return is the 'cleansed' line at linenum.
-  """
-  line = clean_lines.elided[linenum]
-  if line[pos] not in ')}]>':
-    return (line, 0, -1)
-
-  # Check last line
-  (start_pos, stack) = FindStartOfExpressionInLine(line, pos, [])
-  if start_pos > -1:
-    return (line, linenum, start_pos)
-
-  # Continue scanning backward
-  while stack and linenum > 0:
-    linenum -= 1
-    line = clean_lines.elided[linenum]
-    (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack)
-    if start_pos > -1:
-      return (line, linenum, start_pos)
-
-  # Did not find start of expression before beginning of file, give up
-  return (line, 0, -1)
-
-
-def CheckForCopyright(filename, lines, error):
-  """Logs an error if no Copyright message appears at the top of the file."""
-
-  # We'll say it should occur by line 10. Don't forget there's a
-  # dummy line at the front.
-  for line in range(1, min(len(lines), 11)):
-    if re.search(r'Copyright', lines[line], re.I): break
-  else:                       # means no copyright line was found
-    error(filename, 0, 'legal/copyright', 5,
-          'No copyright message found.  '
-          'You should have a line: "Copyright [year] <Copyright Owner>"')
-
-
-def GetIndentLevel(line):
-  """Return the number of leading spaces in line.
-
-  Args:
-    line: A string to check.
-
-  Returns:
-    An integer count of leading spaces, possibly zero.
-  """
-  indent = Match(r'^( *)\S', line)
-  if indent:
-    return len(indent.group(1))
-  else:
-    return 0
-
-
-def GetHeaderGuardCPPVariable(filename):
-  """Returns the CPP variable that should be used as a header guard.
-
-  Args:
-    filename: The name of a C++ header file.
-
-  Returns:
-    The CPP variable that should be used as a header guard in the
-    named file.
-
-  """
-
-  # Restores original filename in case that cpplint is invoked from Emacs's
-  # flymake.
-  filename = re.sub(r'_flymake\.h$', '.h', filename)
-  filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
-  # Replace 'c++' with 'cpp'.
-  filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
-
-  fileinfo = FileInfo(filename)
-  file_path_from_root = fileinfo.RepositoryName()
-  if _root:
-    suffix = os.sep
-    # On Windows, using the directory separator will leave us with a
-    # "bogus escape error" unless we properly escape it in the regex.
-    if suffix == '\\':
-      suffix += '\\'
-    file_path_from_root = re.sub('^' + _root + suffix, '', file_path_from_root)
-  return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
-
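-# e.g. (assuming the repository root resolves to the checkout top and no
-# --root override) 'src/base/logging.h' maps to the guard SRC_BASE_LOGGING_H_.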
-
-def CheckForHeaderGuard(filename, clean_lines, error):
-  """Checks that the file contains a header guard.
-
-  Logs an error if no #ifndef header guard is present.  For headers
-  that do have a guard, checks that the full pathname is used.
-
-  Args:
-    filename: The name of the C++ header file.
-    clean_lines: A CleansedLines instance containing the file.
-    error: The function to call with any errors found.
-  """
-
-  # Don't check for header guards if there are error suppression
-  # comments somewhere in this file.
-  #
-  # Because this is silencing a warning for a nonexistent line, we
-  # only support the very specific NOLINT(build/header_guard) syntax,
-  # and not the general NOLINT or NOLINT(*) syntax.
-  raw_lines = clean_lines.lines_without_raw_strings
-  for i in raw_lines:
-    if Search(r'//\s*NOLINT\(build/header_guard\)', i):
-      return
-
-  # Allow pragma once instead of header guards
-  for i in raw_lines:
-    if Search(r'^\s*#pragma\s+once', i):
-      return
-
-  cppvar = GetHeaderGuardCPPVariable(filename)
-
-  ifndef = ''
-  ifndef_linenum = 0
-  define = ''
-  endif = ''
-  endif_linenum = 0
-  for linenum, line in enumerate(raw_lines):
-    linesplit = line.split()
-    if len(linesplit) >= 2:
-      # find the first occurrence of #ifndef and #define, save arg
-      if not ifndef and linesplit[0] == '#ifndef':
-        # set ifndef to the header guard presented on the #ifndef line.
-        ifndef = linesplit[1]
-        ifndef_linenum = linenum
-      if not define and linesplit[0] == '#define':
-        define = linesplit[1]
-    # find the last occurrence of #endif, save entire line
-    if line.startswith('#endif'):
-      endif = line
-      endif_linenum = linenum
-
-  if not ifndef or not define or ifndef != define:
-    error(filename, 0, 'build/header_guard', 5,
-          'No #ifndef header guard found, suggested CPP variable is: %s' %
-          cppvar)
-    return
-
-  # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
-  # for backward compatibility.
-  if ifndef != cppvar:
-    error_level = 0
-    if ifndef != cppvar + '_':
-      error_level = 5
-
-    ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
-                            error)
-    error(filename, ifndef_linenum, 'build/header_guard', error_level,
-          '#ifndef header guard has wrong style, please use: %s' % cppvar)
-
-  # Check for "//" comments on endif line.
-  ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
-                          error)
-  match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
-  if match:
-    if match.group(1) == '_':
-      # Issue low severity warning for deprecated double trailing underscore
-      error(filename, endif_linenum, 'build/header_guard', 0,
-            '#endif line should be "#endif  // %s"' % cppvar)
-    return
-
-  # Didn't find the corresponding "//" comment.  If this file does not
-  # contain any "//" comments at all, it could be that the compiler
-  # only wants "/**/" comments; look for those instead.
-  no_single_line_comments = True
-  for i in xrange(1, len(raw_lines) - 1):
-    line = raw_lines[i]
-    if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
-      no_single_line_comments = False
-      break
-
-  if no_single_line_comments:
-    match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
-    if match:
-      if match.group(1) == '_':
-        # Low severity warning for double trailing underscore
-        error(filename, endif_linenum, 'build/header_guard', 0,
-              '#endif line should be "#endif  /* %s */"' % cppvar)
-      return
-
-  # Didn't find anything
-  error(filename, endif_linenum, 'build/header_guard', 5,
-        '#endif line should be "#endif  // %s"' % cppvar)
-
-
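# Illustrative sketch, not part of cpplint: the #endif check above accepts
# '#endif  // GUARD' (and the deprecated '#endif  // GUARD_' at severity 0).
# Minimal reproduction of that regex against a hypothetical guard:
import re
_cppvar = 'CPP_SRC_ARROW_STATUS_H_'  # hypothetical guard variable
_endif = '#endif  // CPP_SRC_ARROW_STATUS_H_'
_m = re.match(r'#endif\s*//\s*' + _cppvar + r'(_)?\b', _endif)
assert _m is not None and _m.group(1) is None  # correct style, no extra '_'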
-def CheckHeaderFileIncluded(filename, include_state, error):
-  """Logs an error if a source file does not include its header."""
-
-  # Do not check test files
-  fileinfo = FileInfo(filename)
-  if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()):
-    return
-
-  for ext in GetHeaderExtensions():
-      basefilename = filename[0:len(filename) - len(fileinfo.Extension())]
-      headerfile = basefilename + '.' + ext
-      if not os.path.exists(headerfile):
-        continue
-      headername = FileInfo(headerfile).RepositoryName()
-      first_include = None
-      for section_list in include_state.include_list:
-        for f in section_list:
-          if headername in f[0] or f[0] in headername:
-            return
-          if not first_include:
-            first_include = f[1]
-
-      error(filename, first_include, 'build/include', 5,
-            '%s should include its header file %s' % (fileinfo.RepositoryName(),
-                                                      headername))
-
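# Illustrative sketch, not part of cpplint: the candidate header path is
# built by swapping the source extension for each known header extension
# (assuming fileinfo.Extension() returns the extension with its dot):
_filename = 'src/foo/bar.cc'  # hypothetical source file
_extension = '.cc'            # what fileinfo.Extension() would report here
_basefilename = _filename[0:len(_filename) - len(_extension)]
assert _basefilename + '.' + 'h' == 'src/foo/bar.h'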
-
-def CheckForBadCharacters(filename, lines, error):
-  """Logs an error for each line containing bad characters.
-
-  Two kinds of bad characters:
-
-  1. Unicode replacement characters: These indicate that either the file
-  contained invalid UTF-8 (likely) or Unicode replacement characters (which
-  it shouldn't).  Note that it's possible for this to throw off line
-  numbering if the invalid UTF-8 occurred adjacent to a newline.
-
-  2. NUL bytes.  These are problematic for some tools.
-
-  Args:
-    filename: The name of the current file.
-    lines: An array of strings, each representing a line of the file.
-    error: The function to call with any errors found.
-  """
-  for linenum, line in enumerate(lines):
-    if unicode_escape_decode('\ufffd') in line:
-      error(filename, linenum, 'readability/utf8', 5,
-            'Line contains invalid UTF-8 (or Unicode replacement character).')
-    if '\0' in line:
-      error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
-
-
-def CheckForNewlineAtEOF(filename, lines, error):
-  """Logs an error if there is no newline char at the end of the file.
-
-  Args:
-    filename: The name of the current file.
-    lines: An array of strings, each representing a line of the file.
-    error: The function to call with any errors found.
-  """
-
-  # The array lines() was created by adding two newlines to the
-  # original file (go figure), then splitting on \n.
-  # To verify that the file ends in \n, we just have to make sure the
-  # last-but-two element of lines() exists and is empty.
-  if len(lines) < 3 or lines[-2]:
-    error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
-          'Could not find a newline character at the end of the file.')
-
-
-def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
-  """Logs an error if we see /* ... */ or "..." that extend past one line.
-
-  /* ... */ comments are legitimate inside macros, for one line.
-  Otherwise, we prefer // comments, so it's ok to warn about
-  multi-line /* ... */ comments.  Likewise, strings may extend across
-  multiple lines as long as a line continuation character (backslash)
-  terminates each line.  Although not currently prohibited by the C++
-  style guide, that is ugly and unnecessary.  This lint program handles
-  neither construct well, so we warn about both.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Remove all \\ (escaped backslashes) from the line. They are OK, and the
-  # second (escaped) backslash may trigger later \" detection erroneously.
-  line = line.replace('\\\\', '')
-
-  if line.count('/*') > line.count('*/'):
-    error(filename, linenum, 'readability/multiline_comment', 5,
-          'Complex multi-line /*...*/-style comment found. '
-          'Lint may give bogus warnings.  '
-          'Consider replacing these with //-style comments, '
-          'with #if 0...#endif, '
-          'or with more clearly structured multi-line comments.')
-
-  if (line.count('"') - line.count('\\"')) % 2:
-    error(filename, linenum, 'readability/multiline_string', 5,
-          'Multi-line string ("...") found.  This lint script doesn\'t '
-          'do well with such strings, and may give bogus warnings.  '
-          'Use C++11 raw strings or concatenation instead.')
-
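# Illustrative sketch, not part of cpplint: after stripping escaped
# backslashes, an odd number of unescaped double quotes marks a string
# that continues onto the next line:
_line = 'const char* s = "multi-line \\'  # hypothetical elided line
_line = _line.replace('\\\\', '')
assert (_line.count('"') - _line.count('\\"')) % 2 == 1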
-
-# (non-threadsafe name, thread-safe alternative, validation pattern)
-#
-# The validation pattern is used to eliminate false positives such as:
-#  _rand();               // false positive due to substring match.
-#  ->rand();              // some member function rand().
-#  ACMRandom rand(seed);  // some variable named rand.
-#  ISAACRandom rand();    // another variable named rand.
-#
-# Basically we require the return value of these functions to be used
-# in some expression context on the same line by matching on some
-# operator before the function name.  This eliminates constructors and
-# member function calls.
-_UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)'
-_THREADING_LIST = (
-    ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'),
-    ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'),
-    ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'),
-    ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'),
-    ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'),
-    ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'),
-    ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'),
-    ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'),
-    ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'),
-    ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'),
-    ('strtok(', 'strtok_r(',
-     _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
-    ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'),
-    )
-
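# Illustrative sketch, not part of cpplint: _UNSAFE_FUNC_PREFIX requires an
# operator or '(' before the call, so constructors and member calls such as
# 'ACMRandom rand(seed)' or '->rand()' do not match, while 'x = rand()' does:
import re
_pattern = r'(?:[-+*/=%^&|(<]\s*|>\s+)' + r'rand\(\)'
assert re.search(_pattern, 'int x = rand();')
assert not re.search(_pattern, 'ACMRandom rand(seed);')
assert not re.search(_pattern, 'v->rand();')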
-
-def CheckPosixThreading(filename, clean_lines, linenum, error):
-  """Checks for calls to thread-unsafe functions.
-
-  Much code was originally written without consideration for
-  multi-threading.  Engineers also rely on their old experience; they
-  learned POSIX before the threading extensions were added.  These
-  tests guide engineers toward the thread-safe functions (when using
-  POSIX directly).
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST:
-    # Additional pattern matching check to confirm that this is the
-    # function we are looking for
-    if Search(pattern, line):
-      error(filename, linenum, 'runtime/threadsafe_fn', 2,
-            'Consider using ' + multithread_safe_func +
-            '...) instead of ' + single_thread_func +
-            '...) for improved thread safety.')
-
-
-def CheckVlogArguments(filename, clean_lines, linenum, error):
-  """Checks that VLOG() is only used for defining a logging level.
-
-  For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and
-  VLOG(FATAL) are not.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line):
-    error(filename, linenum, 'runtime/vlog', 5,
-          'VLOG() should be used with numeric verbosity level.  '
-          'Use LOG() if you want symbolic severity levels.')
-
-# Matches invalid increment: *count++, which moves the pointer instead of
-# incrementing the value.
-_RE_PATTERN_INVALID_INCREMENT = re.compile(
-    r'^\s*\*\w+(\+\+|--);')
-
-
-def CheckInvalidIncrement(filename, clean_lines, linenum, error):
-  """Checks for invalid increment *count++.
-
-  For example, the following function:
-  void increment_counter(int* count) {
-    *count++;
-  }
-  is invalid, because it effectively does count++, moving the pointer,
-  and should be replaced with ++*count, (*count)++ or *count += 1.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-  if _RE_PATTERN_INVALID_INCREMENT.match(line):
-    error(filename, linenum, 'runtime/invalid_increment', 5,
-          'Changing pointer instead of value (or unused value of operator*).')
-
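# Illustrative sketch, not part of cpplint: the pattern only fires when the
# whole statement is a dereferenced increment/decrement, where C's operator
# precedence makes '*count++' advance the pointer rather than the value:
import re
_invalid = re.compile(r'^\s*\*\w+(\+\+|--);')
assert _invalid.match('  *count++;')
assert not _invalid.match('  (*count)++;')
assert not _invalid.match('  *count += 1;')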
-
-def IsMacroDefinition(clean_lines, linenum):
-  if Search(r'^#define', clean_lines[linenum]):
-    return True
-
-  if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]):
-    return True
-
-  return False
-
-
-def IsForwardClassDeclaration(clean_lines, linenum):
-  return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum])
-
-
-class _BlockInfo(object):
-  """Stores information about a generic block of code."""
-
-  def __init__(self, linenum, seen_open_brace):
-    self.starting_linenum = linenum
-    self.seen_open_brace = seen_open_brace
-    self.open_parentheses = 0
-    self.inline_asm = _NO_ASM
-    self.check_namespace_indentation = False
-
-  def CheckBegin(self, filename, clean_lines, linenum, error):
-    """Run checks that applies to text up to the opening brace.
-
-    This is mostly for checking the text after the class identifier
-    and the "{", usually where the base class is specified.  For other
-    blocks, there isn't much to check, so we always pass.
-
-    Args:
-      filename: The name of the current file.
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      error: The function to call with any errors found.
-    """
-    pass
-
-  def CheckEnd(self, filename, clean_lines, linenum, error):
-    """Run checks that applies to text after the closing brace.
-
-    This is mostly used for checking end of namespace comments.
-
-    Args:
-      filename: The name of the current file.
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      error: The function to call with any errors found.
-    """
-    pass
-
-  def IsBlockInfo(self):
-    """Returns true if this block is a _BlockInfo.
-
-    This is convenient for verifying that an object is an instance of
-    a _BlockInfo, but not an instance of any of the derived classes.
-
-    Returns:
-      True for this class, False for derived classes.
-    """
-    return self.__class__ == _BlockInfo
-
-
-class _ExternCInfo(_BlockInfo):
-  """Stores information about an 'extern "C"' block."""
-
-  def __init__(self, linenum):
-    _BlockInfo.__init__(self, linenum, True)
-
-
-class _ClassInfo(_BlockInfo):
-  """Stores information about a class."""
-
-  def __init__(self, name, class_or_struct, clean_lines, linenum):
-    _BlockInfo.__init__(self, linenum, False)
-    self.name = name
-    self.is_derived = False
-    self.check_namespace_indentation = True
-    if class_or_struct == 'struct':
-      self.access = 'public'
-      self.is_struct = True
-    else:
-      self.access = 'private'
-      self.is_struct = False
-
-    # Remember initial indentation level for this class.  Using raw_lines here
-    # instead of elided to account for leading comments.
-    self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum])
-
-    # Try to find the end of the class.  This will be confused by things like:
-    #   class A {
-    #   } *x = { ...
-    #
-    # But it's still good enough for CheckSectionSpacing.
-    self.last_line = 0
-    depth = 0
-    for i in range(linenum, clean_lines.NumLines()):
-      line = clean_lines.elided[i]
-      depth += line.count('{') - line.count('}')
-      if not depth:
-        self.last_line = i
-        break
-
-  def CheckBegin(self, filename, clean_lines, linenum, error):
-    # Look for a bare ':'
-    if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]):
-      self.is_derived = True
-
-  def CheckEnd(self, filename, clean_lines, linenum, error):
-    # If there is a DISALLOW macro, it should appear near the end of
-    # the class.
-    seen_last_thing_in_class = False
-    for i in xrange(linenum - 1, self.starting_linenum, -1):
-      match = Search(
-          r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
-          self.name + r'\)',
-          clean_lines.elided[i])
-      if match:
-        if seen_last_thing_in_class:
-          error(filename, i, 'readability/constructors', 3,
-                match.group(1) + ' should be the last thing in the class')
-        break
-
-      if not Match(r'^\s*$', clean_lines.elided[i]):
-        seen_last_thing_in_class = True
-
-    # Check that closing brace is aligned with beginning of the class.
-    # Only do this if the closing brace is indented by only whitespaces.
-    # This means we will not check single-line class definitions.
-    indent = Match(r'^( *)\}', clean_lines.elided[linenum])
-    if indent and len(indent.group(1)) != self.class_indent:
-      if self.is_struct:
-        parent = 'struct ' + self.name
-      else:
-        parent = 'class ' + self.name
-      error(filename, linenum, 'whitespace/indent', 3,
-            'Closing brace should be aligned with beginning of %s' % parent)
-
-
-class _NamespaceInfo(_BlockInfo):
-  """Stores information about a namespace."""
-
-  def __init__(self, name, linenum):
-    _BlockInfo.__init__(self, linenum, False)
-    self.name = name or ''
-    self.check_namespace_indentation = True
-
-  def CheckEnd(self, filename, clean_lines, linenum, error):
-    """Check end of namespace comments."""
-    line = clean_lines.raw_lines[linenum]
-
-    # Check how many lines are enclosed in this namespace.  Don't issue
-    # warning for missing namespace comments if there aren't enough
-    # lines.  However, do apply checks if there is already an end of
-    # namespace comment and it's incorrect.
-    #
-    # TODO(unknown): We always want to check end of namespace comments
-    # if a namespace is large, but sometimes we also want to apply the
-    # check if a short namespace contained nontrivial things (something
-    # other than forward declarations).  There is currently no logic on
-    # deciding what these nontrivial things are, so this check is
-    # triggered by namespace size only, which works most of the time.
-    if (linenum - self.starting_linenum < 10
-        and not Match(r'^\s*};*\s*(//|/\*).*\bnamespace\b', line)):
-      return
-
-    # Look for matching comment at end of namespace.
-    #
-    # Note that we accept C style "/* */" comments for terminating
-    # namespaces, so that code that terminates namespaces inside
-    # preprocessor macros can be cpplint clean.
-    #
-    # We also accept stuff like "// end of namespace <name>." with the
-    # period at the end.
-    #
-    # Besides these, we don't accept anything else; otherwise we might
-    # get false negatives when the existing comment is a substring of the
-    # expected namespace.
-    if self.name:
-      # Named namespace
-      if not Match((r'^\s*};*\s*(//|/\*).*\bnamespace\s+' +
-                    re.escape(self.name) + r'[\*/\.\\\s]*$'),
-                   line):
-        error(filename, linenum, 'readability/namespace', 5,
-              'Namespace should be terminated with "// namespace %s"' %
-              self.name)
-    else:
-      # Anonymous namespace
-      if not Match(r'^\s*};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
-        # If "// namespace anonymous" or "// anonymous namespace (more text)",
-        # mention "// anonymous namespace" as an acceptable form
-        if Match(r'^\s*}.*\b(namespace anonymous|anonymous namespace)\b', line):
-          error(filename, linenum, 'readability/namespace', 5,
-                'Anonymous namespace should be terminated with "// namespace"'
-                ' or "// anonymous namespace"')
-        else:
-          error(filename, linenum, 'readability/namespace', 5,
-                'Anonymous namespace should be terminated with "// namespace"')
-
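# Illustrative sketch, not part of cpplint: the named-namespace regex above
# accepts both '//' and '/* */' terminators, with an optional trailing period:
import re
_name = 'arrow'  # hypothetical namespace name
_pat = (r'^\s*};*\s*(//|/\*).*\bnamespace\s+' + re.escape(_name) +
        r'[\*/\.\\\s]*$')
assert re.match(_pat, '}  // namespace arrow')
assert re.match(_pat, '}  /* namespace arrow */')
assert not re.match(_pat, '}  // namespace')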
-
-class _PreprocessorInfo(object):
-  """Stores checkpoints of nesting stacks when #if/#else is seen."""
-
-  def __init__(self, stack_before_if):
-    # The entire nesting stack before #if
-    self.stack_before_if = stack_before_if
-
-    # The entire nesting stack up to #else
-    self.stack_before_else = []
-
-    # Whether we have already seen #else or #elif
-    self.seen_else = False
-
-
-class NestingState(object):
-  """Holds states related to parsing braces."""
-
-  def __init__(self):
-    # Stack for tracking all braces.  An object is pushed whenever we
-    # see a "{", and popped when we see a "}".  Only 3 types of
-    # objects are possible:
-    # - _ClassInfo: a class or struct.
-    # - _NamespaceInfo: a namespace.
-    # - _BlockInfo: some other type of block.
-    self.stack = []
-
-    # Top of the previous stack before each Update().
-    #
-    # Because the nesting_stack is updated at the end of each line, we
-    # would otherwise need some convoluted checks to find out what the
-    # current scope is at the beginning of the line.  This check is
-    # simplified by saving the previous top of the nesting stack.
-    #
-    # We could save the full stack, but we only need the top.  Copying
-    # the full nesting stack would slow down cpplint by ~10%.
-    self.previous_stack_top = []
-
-    # Stack of _PreprocessorInfo objects.
-    self.pp_stack = []
-
-  def SeenOpenBrace(self):
-    """Check if we have seen the opening brace for the innermost block.
-
-    Returns:
-      True if we have seen the opening brace, False if the innermost
-      block is still expecting an opening brace.
-    """
-    return (not self.stack) or self.stack[-1].seen_open_brace
-
-  def InNamespaceBody(self):
-    """Check if we are currently one level inside a namespace body.
-
-    Returns:
-      True if top of the stack is a namespace block, False otherwise.
-    """
-    return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
-
-  def InExternC(self):
-    """Check if we are currently one level inside an 'extern "C"' block.
-
-    Returns:
-      True if top of the stack is an extern block, False otherwise.
-    """
-    return self.stack and isinstance(self.stack[-1], _ExternCInfo)
-
-  def InClassDeclaration(self):
-    """Check if we are currently one level inside a class or struct declaration.
-
-    Returns:
-      True if top of the stack is a class/struct, False otherwise.
-    """
-    return self.stack and isinstance(self.stack[-1], _ClassInfo)
-
-  def InAsmBlock(self):
-    """Check if we are currently one level inside an inline ASM block.
-
-    Returns:
-      True if the top of the stack is a block containing inline ASM.
-    """
-    return self.stack and self.stack[-1].inline_asm != _NO_ASM
-
-  def InTemplateArgumentList(self, clean_lines, linenum, pos):
-    """Check if current position is inside template argument list.
-
-    Args:
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      pos: position just after the suspected template argument.
-    Returns:
-      True if (linenum, pos) is inside template arguments.
-    """
-    while linenum < clean_lines.NumLines():
-      # Find the earliest character that might indicate a template argument
-      line = clean_lines.elided[linenum]
-      match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:])
-      if not match:
-        linenum += 1
-        pos = 0
-        continue
-      token = match.group(1)
-      pos += len(match.group(0))
-
-      # These things do not look like template argument list:
-      #   class Suspect {
-      #   class Suspect x; }
-      if token in ('{', '}', ';'): return False
-
-      # These things look like template argument list:
-      #   template <class Suspect>
-      #   template <class Suspect = default_value>
-      #   template <class Suspect[]>
-      #   template <class Suspect...>
-      if token in ('>', '=', '[', ']', '.'): return True
-
-      # Check if token is an unmatched '<'.
-      # If not, move on to the next character.
-      if token != '<':
-        pos += 1
-        if pos >= len(line):
-          linenum += 1
-          pos = 0
-        continue
-
-      # If we just found a single '<', we can't be sure whether it opens
-      # a template argument list; we need to find the matching '>'.
-      (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1)
-      if end_pos < 0:
-        # Not sure if template argument list or syntax error in file
-        return False
-      linenum = end_line
-      pos = end_pos
-    return False
-
-  def UpdatePreprocessor(self, line):
-    """Update preprocessor stack.
-
-    We need to handle preprocessor directives due to classes like this:
-      #ifdef SWIG
-      struct ResultDetailsPageElementExtensionPoint {
-      #else
-      struct ResultDetailsPageElementExtensionPoint : public Extension {
-      #endif
-
-    We make the following assumptions (good enough for most files):
-    - Preprocessor condition evaluates to true from #if up to first
-      #else/#elif/#endif.
-
-    - Preprocessor condition evaluates to false from #else/#elif up
-      to #endif.  We still perform lint checks on these lines, but
-      these do not affect nesting stack.
-
-    Args:
-      line: current line to check.
-    """
-    if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line):
-      # Beginning of #if block, save the nesting stack here.  The saved
-      # stack will allow us to restore the parsing state in the #else case.
-      self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack)))
-    elif Match(r'^\s*#\s*(else|elif)\b', line):
-      # Beginning of #else block
-      if self.pp_stack:
-        if not self.pp_stack[-1].seen_else:
-          # This is the first #else or #elif block.  Remember the
-          # whole nesting stack up to this point.  This is what we
-          # keep after the #endif.
-          self.pp_stack[-1].seen_else = True
-          self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
-
-        # Restore the stack to how it was before the #if
-        self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
-      else:
-        # TODO(unknown): unexpected #else, issue warning?
-        pass
-    elif Match(r'^\s*#\s*endif\b', line):
-      # End of #if or #else blocks.
-      if self.pp_stack:
-        # If we saw an #else, we will need to restore the nesting
-        # stack to its former state before the #else, otherwise we
-        # will just continue from where we left off.
-        if self.pp_stack[-1].seen_else:
-          # Here we can just use a shallow copy since we are the last
-          # reference to it.
-          self.stack = self.pp_stack[-1].stack_before_else
-        # Drop the corresponding #if
-        self.pp_stack.pop()
-      else:
-        # TODO(unknown): unexpected #endif, issue warning?
-        pass
-
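# Illustrative sketch, not part of cpplint: a minimal standalone model of
# the checkpointing above: snapshot the stack at '#if', restore the
# snapshot at '#else', and drop the checkpoint at '#endif':
_stack, _pp_stack = ['outer'], []
_pp_stack.append(list(_stack))   # '#if': save the nesting stack
_stack.append('then-branch')     # blocks opened inside the #if branch
_stack = list(_pp_stack[-1])     # '#else': restore the pre-#if stack
_pp_stack.pop()                  # '#endif': drop the corresponding #if
assert _stack == ['outer']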
-  # TODO(unknown): Update() is too long, but we will refactor later.
-  def Update(self, filename, clean_lines, linenum, error):
-    """Update nesting state with current line.
-
-    Args:
-      filename: The name of the current file.
-      clean_lines: A CleansedLines instance containing the file.
-      linenum: The number of the line to check.
-      error: The function to call with any errors found.
-    """
-    line = clean_lines.elided[linenum]
-
-    # Remember top of the previous nesting stack.
-    #
-    # The stack is always pushed/popped and not modified in place, so
-    # we can just do a shallow copy instead of copy.deepcopy.  Using
-    # deepcopy would slow down cpplint by ~28%.
-    if self.stack:
-      self.previous_stack_top = self.stack[-1]
-    else:
-      self.previous_stack_top = None
-
-    # Update pp_stack
-    self.UpdatePreprocessor(line)
-
-    # Count parentheses.  This is to avoid adding struct arguments to
-    # the nesting stack.
-    if self.stack:
-      inner_block = self.stack[-1]
-      depth_change = line.count('(') - line.count(')')
-      inner_block.open_parentheses += depth_change
-
-      # Also check if we are starting or ending an inline assembly block.
-      if inner_block.inline_asm in (_NO_ASM, _END_ASM):
-        if (depth_change != 0 and
-            inner_block.open_parentheses == 1 and
-            _MATCH_ASM.match(line)):
-          # Enter assembly block
-          inner_block.inline_asm = _INSIDE_ASM
-        else:
-          # Not entering assembly block.  If previous line was _END_ASM,
-          # we will now shift to _NO_ASM state.
-          inner_block.inline_asm = _NO_ASM
-      elif (inner_block.inline_asm == _INSIDE_ASM and
-            inner_block.open_parentheses == 0):
-        # Exit assembly block
-        inner_block.inline_asm = _END_ASM
-
-    # Consume namespace declaration at the beginning of the line.  Do
-    # this in a loop so that we catch same line declarations like this:
-    #   namespace proto2 { namespace bridge { class MessageSet; } }
-    while True:
-      # Match start of namespace.  The "\b\s*" below catches namespace
-      # declarations even if they aren't followed by whitespace; this
-      # is so that we don't confuse our namespace checker.  The
-      # missing spaces will be flagged by CheckSpacing.
-      namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
-      if not namespace_decl_match:
-        break
-
-      new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
-      self.stack.append(new_namespace)
-
-      line = namespace_decl_match.group(2)
-      if line.find('{') != -1:
-        new_namespace.seen_open_brace = True
-        line = line[line.find('{') + 1:]
-
-    # Look for a class declaration in whatever is left of the line
-    # after parsing namespaces.  The regexp accounts for decorated classes
-    # such as in:
-    #   class LOCKABLE API Object {
-    #   };
-    class_decl_match = Match(
-        r'^(\s*(?:template\s*<[\w\s<>,:=]*>\s*)?'
-        r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))'
-        r'(.*)$', line)
-    if (class_decl_match and
-        (not self.stack or self.stack[-1].open_parentheses == 0)):
-      # We do not want to accept classes that are actually template arguments:
-      #   template <class Ignore1,
-      #             class Ignore2 = Default<Args>,
-      #             template <Args> class Ignore3>
-      #   void Function() {};
-      #
-      # To avoid template argument cases, we scan forward and look for
-      # an unmatched '>'.  If we see one, assume we are inside a
-      # template argument list.
-      end_declaration = len(class_decl_match.group(1))
-      if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration):
-        self.stack.append(_ClassInfo(
-            class_decl_match.group(3), class_decl_match.group(2),
-            clean_lines, linenum))
-        line = class_decl_match.group(4)
-
-    # If we have not yet seen the opening brace for the innermost block,
-    # run checks here.
-    if not self.SeenOpenBrace():
-      self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
-
-    # Update access control if we are inside a class/struct
-    if self.stack and isinstance(self.stack[-1], _ClassInfo):
-      classinfo = self.stack[-1]
-      access_match = Match(
-          r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
-          r':(?:[^:]|$)',
-          line)
-      if access_match:
-        classinfo.access = access_match.group(2)
-
-        # Check that access keywords are indented +1 space.  Skip this
-        # check if the keywords are not preceded by whitespace.
-        indent = access_match.group(1)
-        if (len(indent) != classinfo.class_indent + 1 and
-            Match(r'^\s*$', indent)):
-          if classinfo.is_struct:
-            parent = 'struct ' + classinfo.name
-          else:
-            parent = 'class ' + classinfo.name
-          slots = ''
-          if access_match.group(3):
-            slots = access_match.group(3)
-          error(filename, linenum, 'whitespace/indent', 3,
-                '%s%s: should be indented +1 space inside %s' % (
-                    access_match.group(2), slots, parent))
-
-    # Consume braces or semicolons from what's left of the line
-    while True:
-      # Match first brace, semicolon, or closed parenthesis.
-      matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
-      if not matched:
-        break
-
-      token = matched.group(1)
-      if token == '{':
-        # If namespace or class hasn't seen an opening brace yet, mark
-        # namespace/class head as complete.  Push a new block onto the
-        # stack otherwise.
-        if not self.SeenOpenBrace():
-          self.stack[-1].seen_open_brace = True
-        elif Match(r'^extern\s*"[^"]*"\s*\{', line):
-          self.stack.append(_ExternCInfo(linenum))
-        else:
-          self.stack.append(_BlockInfo(linenum, True))
-          if _MATCH_ASM.match(line):
-            self.stack[-1].inline_asm = _BLOCK_ASM
-
-      elif token == ';' or token == ')':
-        # If we haven't seen an opening brace yet, but we already saw
-        # a semicolon, this is probably a forward declaration.  Pop
-        # the stack for these.
-        #
-        # Similarly, if we haven't seen an opening brace yet, but we
-        # already saw a closing parenthesis, then these are probably
-        # function arguments with extra "class" or "struct" keywords.
-        # Pop the stack for these as well.
-        if not self.SeenOpenBrace():
-          self.stack.pop()
-      else:  # token == '}'
-        # Perform end of block checks and pop the stack.
-        if self.stack:
-          self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
-          self.stack.pop()
-      line = matched.group(2)
-
-  def InnermostClass(self):
-    """Get class info on the top of the stack.
-
-    Returns:
-      A _ClassInfo object if we are inside a class, or None otherwise.
-    """
-    for i in range(len(self.stack), 0, -1):
-      classinfo = self.stack[i - 1]
-      if isinstance(classinfo, _ClassInfo):
-        return classinfo
-    return None
-
-  def CheckCompletedBlocks(self, filename, error):
-    """Checks that all classes and namespaces have been completely parsed.
-
-    Call this when all lines in a file have been processed.
-    Args:
-      filename: The name of the current file.
-      error: The function to call with any errors found.
-    """
-    # Note: This test can result in false positives if #ifdef constructs
-    # get in the way of brace matching. See the testBuildClass test in
-    # cpplint_unittest.py for an example of this.
-    for obj in self.stack:
-      if isinstance(obj, _ClassInfo):
-        error(filename, obj.starting_linenum, 'build/class', 5,
-              'Failed to find complete declaration of class %s' %
-              obj.name)
-      elif isinstance(obj, _NamespaceInfo):
-        error(filename, obj.starting_linenum, 'build/namespaces', 5,
-              'Failed to find complete declaration of namespace %s' %
-              obj.name)
-
-
-def CheckForNonStandardConstructs(filename, clean_lines, linenum,
-                                  nesting_state, error):
-  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
-
-  Complain about several constructs which gcc-2 accepts, but which are
-  not standard C++.  Warning about these in lint is one way to ease the
-  transition to new compilers.
-  - put storage class first (e.g. "static const" instead of "const static").
-  - "%lld" instead of %qd" in printf-type functions.
-  - "%1$d" is non-standard in printf-type functions.
-  - "\%" is an undefined character escape sequence.
-  - text after #endif is not allowed.
-  - invalid inner-style forward declaration.
-  - >? and <? operators, and their >?= and <?= cousins.
-
-  Additionally, check for constructor/destructor style violations and reference
-  members, as it is very convenient to do so while checking for
-  gcc-2 compliance.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: A callable to which errors are reported, which takes 4 arguments:
-           filename, line number, error level, and message
-  """
-
-  # Remove comments from the line, but leave in strings for now.
-  line = clean_lines.lines[linenum]
-
-  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
-    error(filename, linenum, 'runtime/printf_format', 3,
-          '%q in format strings is deprecated.  Use %ll instead.')
-
-  if Search(r'printf\s*\(.*".*%\d+\$', line):
-    error(filename, linenum, 'runtime/printf_format', 2,
-          '%N$ formats are unconventional.  Try rewriting to avoid them.')
-
-  # Remove escaped backslashes before looking for undefined escapes.
-  line = line.replace('\\\\', '')
-
-  if Search(r'("|\').*\\(%|\[|\(|{)', line):
-    error(filename, linenum, 'build/printf_format', 3,
-          '%, [, (, and { are undefined character escapes.  Unescape them.')
-
-  # For the rest, work with both comments and strings removed.
-  line = clean_lines.elided[linenum]
-
-  if Search(r'\b(const|volatile|void|char|short|int|long'
-            r'|float|double|signed|unsigned'
-            r'|schar|u?int8|u?int16|u?int32|u?int64)'
-            r'\s+(register|static|extern|typedef)\b',
-            line):
-    error(filename, linenum, 'build/storage_class', 5,
-          'Storage-class specifier (static, extern, typedef, etc) should be '
-          'at the beginning of the declaration.')
-
-  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
-    error(filename, linenum, 'build/endif_comment', 5,
-          'Uncommented text after #endif is non-standard.  Use a comment.')
-
-  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
-    error(filename, linenum, 'build/forward_decl', 5,
-          'Inner-style forward declarations are invalid.  Remove this line.')
-
-  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
-            line):
-    error(filename, linenum, 'build/deprecated', 3,
-          '>? and <? (max and min) operators are non-standard and deprecated.')
-
-  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
-    # TODO(unknown): Could it be expanded safely to arbitrary references,
-    # without triggering too many false positives? The first
-    # attempt triggered 5 warnings for mostly benign code in the regtest, hence
-    # the restriction.
-    # Here's the original regexp, for the reference:
-    # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
-    # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
-    error(filename, linenum, 'runtime/member_string_references', 2,
-          'const string& members are dangerous. It is much better to use '
-          'alternatives, such as pointers or simple constants.')
-
-  # Everything else in this function operates on class declarations.
-  # Return early if the top of the nesting stack is not a class, or if
-  # the class head is not completed yet.
-  classinfo = nesting_state.InnermostClass()
-  if not classinfo or not classinfo.seen_open_brace:
-    return
-
-  # The class may have been declared with namespace or classname qualifiers.
-  # The constructor and destructor will not have those qualifiers.
-  base_classname = classinfo.name.split('::')[-1]
-
-  # Look for single-argument constructors that aren't marked explicit.
-  # Technically a valid construct, but against style.
-  explicit_constructor_match = Match(
-      r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
-      r'\(((?:[^()]|\([^()]*\))*)\)'
-      % re.escape(base_classname),
-      line)
-
-  if explicit_constructor_match:
-    is_marked_explicit = explicit_constructor_match.group(1)
-
-    if not explicit_constructor_match.group(2):
-      constructor_args = []
-    else:
-      constructor_args = explicit_constructor_match.group(2).split(',')
-
-    # Collapse arguments so that commas in template parameter lists and
-    # function parameter lists don't split arguments in two.
-    i = 0
-    while i < len(constructor_args):
-      constructor_arg = constructor_args[i]
-      while (constructor_arg.count('<') > constructor_arg.count('>') or
-             constructor_arg.count('(') > constructor_arg.count(')')):
-        constructor_arg += ',' + constructor_args[i + 1]
-        del constructor_args[i + 1]
-      constructor_args[i] = constructor_arg
-      i += 1
-
-    variadic_args = [arg for arg in constructor_args if '&&...' in arg]
-    defaulted_args = [arg for arg in constructor_args if '=' in arg]
-    noarg_constructor = (not constructor_args or  # empty arg list
-                         # 'void' arg specifier
-                         (len(constructor_args) == 1 and
-                          constructor_args[0].strip() == 'void'))
-    onearg_constructor = ((len(constructor_args) == 1 and  # exactly one arg
-                           not noarg_constructor) or
-                          # all but at most one arg defaulted
-                          (len(constructor_args) >= 1 and
-                           not noarg_constructor and
-                           len(defaulted_args) >= len(constructor_args) - 1) or
-                          # variadic arguments with zero or one argument
-                          (len(constructor_args) <= 2 and
-                           len(variadic_args) >= 1))
-    initializer_list_constructor = bool(
-        onearg_constructor and
-        Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
-    copy_constructor = bool(
-        onearg_constructor and
-        Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
-              % re.escape(base_classname), constructor_args[0].strip()))
-
-    if (not is_marked_explicit and
-        onearg_constructor and
-        not initializer_list_constructor and
-        not copy_constructor):
-      if defaulted_args or variadic_args:
-        error(filename, linenum, 'runtime/explicit', 5,
-              'Constructors callable with one argument '
-              'should be marked explicit.')
-      else:
-        error(filename, linenum, 'runtime/explicit', 5,
-              'Single-parameter constructors should be marked explicit.')
-    elif is_marked_explicit and not onearg_constructor:
-      if noarg_constructor:
-        error(filename, linenum, 'runtime/explicit', 5,
-              'Zero-parameter constructors should not be marked explicit.')
-
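# Illustrative sketch, not part of cpplint: the copy-constructor pattern
# above recognizes 'Foo(const Foo&)', so such constructors are not flagged
# as single-argument constructors that need 'explicit':
import re
_cls = 'Foo'  # hypothetical class name
_copy = (r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
         % re.escape(_cls))
assert re.match(_copy, 'const Foo&')
assert not re.match(_copy, 'int')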
-
-def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
-  """Checks for the correctness of various spacing around function calls.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Since function calls often occur inside if/for/while/switch
-  # expressions (which have their own, more liberal conventions), we
-  # first see if we should be looking inside such an expression for a
-  # function call, to which we can apply stricter standards.
-  fncall = line    # if there's no control flow construct, look at whole line
-  for pattern in (r'\bif\s*\((.*)\)\s*{',
-                  r'\bfor\s*\((.*)\)\s*{',
-                  r'\bwhile\s*\((.*)\)\s*[{;]',
-                  r'\bswitch\s*\((.*)\)\s*{'):
-    match = Search(pattern, line)
-    if match:
-      fncall = match.group(1)    # look inside the parens for function calls
-      break
-
-  # Except in if/for/while/switch, there should never be space
-  # immediately inside parens (eg "f( 3, 4 )").  We make an exception
-  # for nested parens ( (a+b) + c ).  Likewise, there should never be
-  # a space before a ( when it's a function argument.  I assume it's a
-  # function argument when the char before the whitespace is legal in
-  # a function name (alnum + _) and we're not starting a macro. Also ignore
-  # pointers and references to arrays and functions because they're too tricky:
-  # we use a very simple way to recognize these:
-  # " (something)(maybe-something)" or
-  # " (something)(maybe-something," or
-  # " (something)[something]"
-  # Note that we assume the contents of [] to be short enough that
-  # they'll never need to wrap.
-  if (  # Ignore control structures.
-      not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
-                 fncall) and
-      # Ignore pointers/references to functions.
-      not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
-      # Ignore pointers/references to arrays.
-      not Search(r' \([^)]+\)\[[^\]]+\]', fncall)):
-    if Search(r'\w\s*\(\s(?!\s*\\$)', fncall):      # a ( used for a fn call
-      error(filename, linenum, 'whitespace/parens', 4,
-            'Extra space after ( in function call')
-    elif Search(r'\(\s+(?!(\s*\\)|\()', fncall):
-      error(filename, linenum, 'whitespace/parens', 2,
-            'Extra space after (')
-    if (Search(r'\w\s+\(', fncall) and
-        not Search(r'_{0,2}asm_{0,2}\s+_{0,2}volatile_{0,2}\s+\(', fncall) and
-        not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
-        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and
-        not Search(r'\b(' + '|'.join(_ALT_TOKEN_REPLACEMENT.keys()) + r')\b\s+\(',
-                   fncall) and
-        not Search(r'\bcase\s+\(', fncall)):
-      # TODO(unknown): Space after an operator function seems to be a common
-      # error, silence those for now by restricting them to highest verbosity.
-      if Search(r'\boperator_*\b', line):
-        error(filename, linenum, 'whitespace/parens', 0,
-              'Extra space before ( in function call')
-      else:
-        error(filename, linenum, 'whitespace/parens', 4,
-              'Extra space before ( in function call')
-    # If the ) is followed only by a newline or a { + newline, assume it's
-    # part of a control statement (if/while/etc), and don't complain
-    if Search(r'[^)]\s+\)\s*[^{\s]', fncall):
-      # If the closing parenthesis is preceded by only whitespaces,
-      # try to give a more descriptive error message.
-      if Search(r'^\s+\)', fncall):
-        error(filename, linenum, 'whitespace/parens', 2,
-              'Closing ) should be moved to the previous line')
-      else:
-        error(filename, linenum, 'whitespace/parens', 2,
-              'Extra space before )')
-
-
-def IsBlankLine(line):
-  """Returns true if the given line is blank.
-
-  We consider a line to be blank if the line is empty or consists of
-  only white spaces.
-
-  Args:
-    line: A line of text.
-
-  Returns:
-    True if the given line is blank.
-  """
-  return not line or line.isspace()
-
-
-def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
-                                 error):
-  is_namespace_indent_item = (
-      len(nesting_state.stack) > 1 and
-      nesting_state.stack[-1].check_namespace_indentation and
-      isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and
-      nesting_state.previous_stack_top == nesting_state.stack[-2])
-
-  if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
-                                     clean_lines.elided, line):
-    CheckItemIndentationInNamespace(filename, clean_lines.elided,
-                                    line, error)
-
-
-def CheckForFunctionLengths(filename, clean_lines, linenum,
-                            function_state, error):
-  """Reports for long function bodies.
-
-  For an overview why this is done, see:
-  https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
-
-  Uses a simplistic algorithm assuming other style guidelines
-  (especially spacing) are followed.
-  Only checks unindented functions, so class members are unchecked.
-  Trivial bodies are unchecked, so constructors with huge initializer lists
-  may be missed.
-  Blank/comment lines are not counted so as to avoid encouraging the removal
-  of vertical space and comments just to get through a lint check.
-  NOLINT *on the last line of a function* disables this check.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    function_state: Current function name and lines in body so far.
-    error: The function to call with any errors found.
-  """
-  lines = clean_lines.lines
-  line = lines[linenum]
-  joined_line = ''
-
-  starting_func = False
-  regexp = r'(\w(\w|::|\*|\&|\s)*)\('  # decls * & space::name( ...
-  match_result = Match(regexp, line)
-  if match_result:
-    # If the name is all caps and underscores, figure it's a macro and
-    # ignore it, unless it's TEST or TEST_F.
-    function_name = match_result.group(1).split()[-1]
-    if function_name == 'TEST' or function_name == 'TEST_F' or (
-        not Match(r'[A-Z_]+$', function_name)):
-      starting_func = True
-
-  if starting_func:
-    body_found = False
-    for start_linenum in range(linenum, clean_lines.NumLines()):
-      start_line = lines[start_linenum]
-      joined_line += ' ' + start_line.lstrip()
-      if Search(r'(;|})', start_line):  # Declarations and trivial functions
-        body_found = True
-        break                              # ... ignore
-      elif Search(r'{', start_line):
-        body_found = True
-        function = Search(r'((\w|:)*)\(', line).group(1)
-        if Match(r'TEST', function):    # Handle TEST... macros
-          parameter_regexp = Search(r'(\(.*\))', joined_line)
-          if parameter_regexp:             # Ignore bad syntax
-            function += parameter_regexp.group(1)
-        else:
-          function += '()'
-        function_state.Begin(function)
-        break
-    if not body_found:
-      # No body for the function (or evidence of a non-function) was found.
-      error(filename, linenum, 'readability/fn_size', 5,
-            'Lint failed to find start of function body.')
-  elif Match(r'^\}\s*$', line):  # function end
-    function_state.Check(error, filename, linenum)
-    function_state.End()
-  elif not Match(r'^\s*$', line):
-    function_state.Count()  # Count non-blank/non-comment lines.
-
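# Illustrative sketch, not part of cpplint: the declaration regex above
# captures a possibly-qualified name before '(', and all-caps names are
# treated as macros unless they are TEST or TEST_F:
import re
_decl = re.compile(r'(\w(\w|::|\*|\&|\s)*)\(')
_m = _decl.match('void MyClass::DoWork(int x) {')
assert _m and _m.group(1).split()[-1] == 'MyClass::DoWork'
assert re.match(r'[A-Z_]+$', 'MY_MACRO')  # would be skipped as a macro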
-
-_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')
-
-
-def CheckComment(line, filename, linenum, next_line_start, error):
-  """Checks for common mistakes in comments.
... 490241 lines suppressed ...

[arrow-rs] 04/14: Removed matlab.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit db557f25fc1448c10481e0418706b283f618bff2
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:20:01 2021 +0000

    Removed matlab.
---
 matlab/.gitignore                                  |  23 --
 matlab/CMakeLists.txt                              |  60 ----
 matlab/README.md                                   | 112 -------
 matlab/build_support/common_vars.m                 |  24 --
 matlab/build_support/compile.m                     |  41 ---
 matlab/build_support/test.m                        |  28 --
 .../matlab_interface_for_apache_arrow_design.md    | 366 ---------------------
 matlab/src/+mlarrow/+util/createMetadataStruct.m   |  24 --
 matlab/src/+mlarrow/+util/createVariableStruct.m   |  24 --
 .../+util/makeValidMATLABTableVariableNames.m      |  42 ---
 matlab/src/+mlarrow/+util/table2mlarrow.m          |  83 -----
 matlab/src/feather_reader.cc                       | 267 ---------------
 matlab/src/feather_reader.h                        |  77 -----
 matlab/src/feather_writer.cc                       | 338 -------------------
 matlab/src/feather_writer.h                        |  73 ----
 matlab/src/featherread.m                           |  90 -----
 matlab/src/featherreadmex.cc                       |  37 ---
 matlab/src/featherwrite.m                          |  44 ---
 matlab/src/featherwritemex.cc                      |  37 ---
 matlab/src/matlab_traits.h                         | 103 ------
 matlab/src/util/handle_status.cc                   |  91 -----
 matlab/src/util/handle_status.h                    |  32 --
 matlab/src/util/unicode_conversion.cc              |  63 ----
 matlab/src/util/unicode_conversion.h               |  32 --
 matlab/test/tfeather.m                             | 232 -------------
 matlab/test/tfeathermex.m                          |  76 -----
 matlab/test/util/createTable.m                     |  68 ----
 .../test/util/createVariablesAndMetadataStructs.m  |  98 ------
 matlab/test/util/featherMEXRoundTrip.m             |  22 --
 matlab/test/util/featherRoundTrip.m                |  22 --
 30 files changed, 2629 deletions(-)

diff --git a/matlab/.gitignore b/matlab/.gitignore
deleted file mode 100644
index e89b1b9..0000000
--- a/matlab/.gitignore
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# CMake files
-CMakeFiles/*
-CMakeCache.txt
-
-# MEX files
-*.mex*
diff --git a/matlab/CMakeLists.txt b/matlab/CMakeLists.txt
deleted file mode 100644
index fb80670..0000000
--- a/matlab/CMakeLists.txt
+++ /dev/null
@@ -1,60 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-cmake_minimum_required(VERSION 3.2)
-set(CMAKE_CXX_STANDARD 11)
-
-set(MLARROW_VERSION "4.0.0-SNAPSHOT")
-string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" MLARROW_BASE_VERSION "${MLARROW_VERSION}")
-
-project(mlarrow VERSION "${MLARROW_BASE_VERSION}")
-
-# Grab CMAKE Modules from the CPP interface
-set(CPP_CMAKE_MODULES "${CMAKE_SOURCE_DIR}/../cpp/cmake_modules")
-if(EXISTS "${CPP_CMAKE_MODULES}")
-  set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CPP_CMAKE_MODULES})
-endif()
-
-## Arrow is Required
-find_package(Arrow REQUIRED)
-
-## MATLAB is required to be installed to build MEX interfaces
-set(MATLAB_ADDITIONAL_VERSIONS "R2018a=9.4")
-find_package(Matlab REQUIRED MX_LIBRARY)
-
-# Build featherread mex file based on the arrow shared library
-matlab_add_mex(NAME
-               featherreadmex
-               SRC
-               src/featherreadmex.cc
-               src/feather_reader.cc
-               src/util/handle_status.cc
-               src/util/unicode_conversion.cc
-               LINK_TO
-               ${ARROW_SHARED_LIB})
-target_include_directories(featherreadmex PRIVATE ${ARROW_INCLUDE_DIR})
-
-# Build featherwrite mex file based on the arrow shared library
-matlab_add_mex(NAME
-               featherwritemex
-               SRC
-               src/featherwritemex.cc
-               src/feather_writer.cc
-               src/util/handle_status.cc
-               LINK_TO
-               ${ARROW_SHARED_LIB})
-target_include_directories(featherwritemex PRIVATE ${ARROW_INCLUDE_DIR})
diff --git a/matlab/README.md b/matlab/README.md
deleted file mode 100644
index edf991e..0000000
--- a/matlab/README.md
+++ /dev/null
@@ -1,112 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-## MATLAB Library for Apache Arrow
-
-## Status
-
-This is a very early-stage MATLAB interface to the Apache Arrow C++ libraries.
-
-The current code only supports reading/writing numeric types from/to Feather files.
-
-## Building from source
-
-### Get Arrow and build Arrow CPP
-
-See: [Arrow CPP README](../cpp/README.md)
-
-### Build MATLAB interface to Apache Arrow using MATLAB R2018a:
-
-    cd arrow/matlab
-    mkdir build
-    cd build
-    cmake ..
-    make
-
-#### Non-standard MATLAB and Arrow installations
-
-To specify a non-standard MATLAB install location, use the Matlab_ROOT_DIR CMake flag:
-
-    cmake .. -DMatlab_ROOT_DIR=/<PATH_TO_MATLAB_INSTALL>
-
-To specify a non-standard Arrow install location, use the ARROW_HOME CMake flag:
-
-    cmake .. -DARROW_HOME=/<PATH_TO_ARROW_INSTALL>
-
-### Build MATLAB interface to Arrow using MATLAB R2018b or later:
-
-This may be preferred if you are using MATLAB R2018b or later and have encountered [linker errors](https://gitlab.kitware.com/cmake/cmake/issues/18391) when using CMake.
-
-Prerequisite: Ensure that the Arrow C++ library is already installed and the `ARROW_HOME` environment variable is set to the installation root.
-
-To verify this, you can run:
-
-``` matlab
->> getenv ARROW_HOME
-```
-
-This should print a path that contains `include` and `lib` directories with Arrow C++ headers and libraries.
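-
-If `ARROW_HOME` is not set, you can set it for the current MATLAB session before compiling. A minimal sketch (the path below is only an example; use your actual Arrow install prefix):
-
-``` matlab
->> setenv('ARROW_HOME', '/usr/local');
-```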
-
-Navigate to the `build_support` subfolder and run the `compile` function to build the necessary MEX files:
-
-``` matlab
->> cd build_support
->> compile
-```
-
-Run the `test` function to execute the unit tests:
-
-``` matlab
->> test
-```
-
-## Try it out
-
-### Add the src and build directories to your MATLAB path
-
-``` matlab
->> cd(fullfile('arrow', 'matlab'));
->> addpath src;
->> addpath build;
-```
-
-### Write a MATLAB table to a Feather file
-
-``` matlab
->> t = array2table(rand(10, 10));
->> filename = 'table.feather';
->> featherwrite(filename,t);
-```
-
-### Read a Feather file into a MATLAB table
-
-``` matlab
->> filename = 'table.feather';
->> t = featherread(filename);
-```
-
-## Running the tests
-
-``` matlab
->> cd(fullfile('arrow', 'matlab'));
->> addpath src;
->> addpath build;
->> cd test;
->> runtests .;
-```
diff --git a/matlab/build_support/common_vars.m b/matlab/build_support/common_vars.m
deleted file mode 100644
index a7c9d6a..0000000
--- a/matlab/build_support/common_vars.m
+++ /dev/null
@@ -1,24 +0,0 @@
-function vars = common_vars()
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-fileDir = fileparts(mfilename("fullpath"));
-
-vars.srcDir = fullfile(fileDir, "..", "src");
-vars.testDir = fullfile(fileDir, "..", "test");
-vars.buildDir = fullfile(fileDir, "..", "build");
-end
\ No newline at end of file
diff --git a/matlab/build_support/compile.m b/matlab/build_support/compile.m
deleted file mode 100644
index d436dad..0000000
--- a/matlab/build_support/compile.m
+++ /dev/null
@@ -1,41 +0,0 @@
-function compile()
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-vars = common_vars();
-
-mkdir(vars.buildDir);
-
-% ARROW_HOME is needed on all platforms to locate the Arrow headers and
-% libraries, so resolve it before assembling any platform-specific linker flags.
-arrowHome = getenv("ARROW_HOME");
-if isempty(arrowHome)
-    error("The ARROW_HOME environment variable must be set.");
-end
-
-ldflags = string.empty;
-if isunix
-    ldflags(end+1) = "-Wl";
-    ldflags(end+1) = "-rpath '" + fullfile(arrowHome, "lib") + "'";
-end
-
-mex(fullfile(vars.srcDir, "featherreadmex.cc"), ...
-    fullfile(vars.srcDir, "feather_reader.cc"), ...
-    fullfile(vars.srcDir, "util", "handle_status.cc"), ...
-    "-L" + fullfile(arrowHome, "lib"), "-larrow", ...
-    "-I" + fullfile(arrowHome, "include"), ...
-    "LDFLAGS=""\$LDFLAGS " + strjoin(ldflags, ",") + """", ...
-    "-outdir", vars.buildDir, ...
-    "-R2018a", "-v");
-end
diff --git a/matlab/build_support/test.m b/matlab/build_support/test.m
deleted file mode 100644
index 990549e..0000000
--- a/matlab/build_support/test.m
+++ /dev/null
@@ -1,28 +0,0 @@
-function test()
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-vars = common_vars();
-
-compile();
-
-originalPath = addpath(vars.srcDir, vars.buildDir);
-restoreOriginalPath = onCleanup(@()path(originalPath));
-
-results = runtests(vars.testDir, "IncludeSubfolders", true, "OutputDetail", 3);
-assert(all(~[results.Failed]));
-end
diff --git a/matlab/doc/matlab_interface_for_apache_arrow_design.md b/matlab/doc/matlab_interface_for_apache_arrow_design.md
deleted file mode 100644
index de2bb13..0000000
--- a/matlab/doc/matlab_interface_for_apache_arrow_design.md
+++ /dev/null
@@ -1,366 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# MATLAB Interface for Apache Arrow 
-
-## Overview 
-This document outlines a high-level roadmap for development of a [MATLAB] Interface for Apache Arrow, which enables interfacing with [Arrow] memory. 
-
-## Use Cases 
-Apache Arrow is designed to enable a variety of high-performance columnar analytics use cases. 
-
-This design document focuses on a subset of use cases that we feel will help to lay the foundation for more advanced [use cases] in the future. 
-
-1. **UC1**: Ability to create, access, and delete Arrow memory using MATLAB code. 
-2. **UC2**: Ability to serialize and deserialize Arrow memory using MATLAB code to/from file formats like Parquet, Feather, JSON, and CSV. 
-3. **UC3**: Ability to move in-memory tabular data, represented as a MATLAB table, to other languages, like Python, R, and Rust, with minimal overhead (ideally, zero copy). 
-
-## Design 
-We envision a set of packaged (`arrow.*`) classes and functions allowing users to interact with key functionality from the Arrow C++ libraries using MATLAB code. 
-
-Included below is a list of example MATLAB and C++ APIs that would be exposed by the MATLAB Interface for Apache Arrow. 
-
-### MATLAB APIs 
-- `arrow.Buffer`
-- `arrow.Array`
-- `arrow.RecordBatch`
-- `arrow.Table`
-- `arrow.Field`
-- `arrow.Schema`
-- `arrow.type.DataType`
-  - `arrow.type.Float64`
-  - `arrow.type.String`
-  - `arrow.type.Date`
-  - `arrow.type.Time`
-  - ... 
-- `arrow.memory.getTotalBytesAllocated`
-- `arrow.memory.allocateBuffer`
-- ... 
-
-### C++ APIs 
-In order to enable interaction with the Arrow C++ libraries, the MATLAB Interface for Apache Arrow must expose associated C++ APIs for wrapping/unwrapping MATLAB [`mxArray`] data to/from appropriate Arrow C++ types. 
-
-The list below provides a few brief examples of what these C++ APIs might look like (intended to be consistent with the rest of the Arrow ecosystem). 
-
-- `arrow::matlab::is_array`
-- `arrow::matlab::is_record_batch` 
-- `arrow::matlab::is_table`
-- `arrow::matlab::unwrap_array`
-- `arrow::matlab::wrap_array`
-- `arrow::matlab::unwrap_record_batch`
-- `arrow::matlab::wrap_record_batch`
-- `arrow::matlab::unwrap_table`
-- `arrow::matlab::wrap_table`
-- ... 
-
-## Design Cases 
-
-### Use Case: UC1 
-A MATLAB developer could create an `arrow.Array` from an "ordinary" MATLAB array (e.g. a numeric row vector of type `double`). 
-They could then operate on this array in a variety of different ways (e.g. indexing/slicing, getting its type/class, clearing it from the workspace, etc.). 
-The `arrow.array` “factory function” returns a type-specific, concrete subclass of the abstract `arrow.Array` class based on the MATLAB type of the input array. For example, passing a double array to the `arrow.array` function will return a corresponding `arrow.Float64Array`. 
-
-**Note**: MATLAB [`missing` values] (e.g. `NaN`, `NaT`, `<undefined>`) are automatically converted into Arrow `NULL` values upon construction of an `arrow.Array` subclass instance. 
-
-###### Example Code: 
-``` matlab
->> A = randi(100, 1, 5) 
-A = 
-    82 91 13 92 64 
-
->> class(A) 
-ans = 
-    'double' 
-
->> A(4) = NaN; % Set the fourth element to NaN. 
-
->> AA = arrow.array(A); % Create an arrow.Array from A. 
-
->> class(AA) 
-ans = 
-    'arrow.Float64Array' 
-
->> AA(3:5) % Extract elements at indices 3 to 5 from AA. 
-ans = 
-    13 <NULL> 64 
-
->> clear AA; % Clear AA from workspace and release Arrow C++ memory. 
-```
-
-### Use Case: UC2 
-
-#### Developer Workflow for Writing a MATLAB Table to a Feather File 
-
-To serialize MATLAB data to a file on disk (e.g. Feather, Parquet), a MATLAB developer could start by constructing an `arrow.Table` using one of several different approaches. 
-
-They could individually compose the table from a set of `arrow.Array` objects (one for each table variable). 
-
-###### Example Code: 
-``` matlab
->> Var1 = arrow.array(["foo"; "bar"; "baz"]); 
-
->> Var2 = arrow.array([today; today + 1; today + 2]); 
-
->> Var3 = arrow.array([10; 20; 30]); 
-
->> AT = arrow.Table(Var1, Var2, Var3); 
-```
-Alternatively, they could directly convert an existing MATLAB `table` into an `arrow.Table` using a function like `arrow.matlab2arrow`. 
-
-###### Example Code: 
-``` matlab
->> Weight = [10; 24; 10; 12; 18]; 
-
->> Radius = [80; 135; 65; 70; 150]; 
-
->> Density = [10.2; 20.5; 11.2; 13.7; 17.8]; 
-
->> T = table(Weight, Radius, Density); % Create a MATLAB table 
-
->> AT = arrow.matlab2arrow(T); % Create an arrow.Table 
-```
-To serialize the `arrow.Table`, `AT`, to a file (e.g. Feather) on disk, the user could then instantiate an `arrow.FeatherTableWriter`. 
-
-###### Example Code: 
-``` matlab
->> featherTableWriter = arrow.FeatherTableWriter(); 
-
->> featherTableWriter.write(AT, "data.feather"); 
-```
-The Feather file could then be read and operated on by an external process like Rust or Go. To read it back into MATLAB after modification by another process, the user could instantiate an `arrow.FeatherTableReader`. 
-
-###### Example Code: 
-``` matlab
->> featherTableReader = arrow.FeatherTableReader("data.feather"); 
-
->> AT = featherTableReader.read(); 
-```
-#### Advanced MATLAB User Workflow for Implementing Support for Writing to Feather Files 
-
-To add support for writing to Feather files, an advanced MATLAB user could use the MATLAB and C++ APIs offered by the MATLAB Interface for Apache Arrow to create `arrow.FeatherTableWriter`. 
-
-They would need to author a [MEX function] (e.g. `featherwriteMEX`), which can be called directly by MATLAB code. Within their MEX function, they could use `arrow::matlab::unwrap_table` to convert between the MATLAB representation of the Arrow memory (`arrow.Table`) and the equivalent C++ representation (`arrow::Table`). Once the `arrow.Table` has been "unwrapped" into a C++ `arrow::Table`, it can be passed to the appropriate Arrow C++ library API for writing to a Feather file (`arrow::i [...]
-
-An analogous workflow could be followed to create `arrow.FeatherTableReader` to enable reading from Feather files. 
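-
-As a minimal sketch (reusing the hypothetical `featherwriteMEX` name from above), the MATLAB side of `arrow.FeatherTableWriter` could simply delegate to the MEX layer: 
-
-``` matlab
-% Sketch only: featherwriteMEX is the hypothetical MEX function described above.
-classdef FeatherTableWriter
-    methods
-        function write(~, arrowTable, filename)
-            % The MEX layer unwraps arrowTable (arrow.Table -> arrow::Table)
-            % and passes it to the Arrow C++ Feather writer.
-            featherwriteMEX(arrowTable, filename);
-        end
-    end
-end
-```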
-
-#### Enabling High-Level Workflows 
-
-Ultimately, many of the APIs exposed by the MATLAB Interface for Apache Arrow are targeted at advanced MATLAB users. By leveraging these building blocks, advanced MATLAB users can create high-level interfaces, which are useful to everyday MATLAB users. An example of such a high-level interface would be `featherwrite`, intended to make it easy to write Feather files. A diagram summarizing the overall workflow and specific pieces an advanced user would need to author to create such a high- [...]
-
-![Code flow diagram](https://github.com/mathworks/matlab-arrow-support-files/raw/main/images/design_doc_code_flow_diagram.svg)
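-
-A rough sketch of such a high-level `featherwrite` function, assuming the `arrow.matlab2arrow` and `arrow.FeatherTableWriter` APIs described above: 
-
-``` matlab
-function featherwrite(filename, t)
-% Sketch only: wraps the lower-level interfaces for everyday MATLAB users.
-AT = arrow.matlab2arrow(t);            % MATLAB table -> arrow.Table
-writer = arrow.FeatherTableWriter();
-writer.write(AT, filename);            % Serialize to Feather on disk
-end
-```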
-
-### Use Case: UC3 
-
-Arrow supports [several approaches to sharing memory locally]. 
-
-Roughly speaking, local memory sharing workflows can be divided into two categories: 
-1. In-Process Memory Sharing 
-2. Out-of-Process Memory Sharing 
-
-#### In-Process Memory Sharing 
-
-[MATLAB supports running Python code within the MATLAB process]. In theory, because MATLAB and Python can share the same virtual address space, users should be able to share Arrow memory efficiently between MATLAB and PyArrow code. The [Apache Arrow C Data Interface] defines a lightweight C API for sharing Arrow data and metadata between multiple languages running within the same virtual address space. 
-
-To share a MATLAB `arrow.Array` with PyArrow efficiently, a user could use the `exportToCDataInterface` method to export the Arrow memory wrapped by an `arrow.Array` to the C Data Interface format, consisting of two C-style structs, [`ArrowArray`] and [`ArrowSchema`], which represent the Arrow data and associated metadata. 
-
-Memory addresses to the `ArrowArray` and `ArrowSchema` structs are returned by the call to `exportToCDataInterface`. These addresses can be passed to Python directly, without having to make any copies of the underlying Arrow data structures that they refer to. A user can then wrap the underlying data pointed to by the `ArrowArray` struct (which is already in the [Arrow Columnar Format]), as well as extract the necessary metadata from the `ArrowSchema` struct, to create a `pyarrow.Array`  [...]
-
-###### Example Code: 
-``` matlab
-% Create a MATLAB arrow.Array. 
->> AA = arrow.array([1, 2, 3, 4, 5]); 
-
-% Export the MATLAB arrow.Array to the C Data Interface format, returning the 
-% memory addresses of the required ArrowArray and ArrowSchema C-style structs. 
->> [arrayMemoryAddress, schemaMemoryAddress] = AA.exportToCDataInterface(); 
-
-% Import the memory addresses of the C Data Interface format structs to create a pyarrow.Array. 
->> PA = py.pyarrow.Array._import_from_c(arrayMemoryAddress, schemaMemoryAddress); 
-```
-Conversely, a user can create an Arrow array using PyArrow and share it with MATLAB. To do this, they can call the method `_export_to_c` to export a `pyarrow.Array` to the C Data Interface format. 
-
-The memory addresses to the `ArrowArray` and `ArrowSchema` structs populated by the call to `_export_to_c` can be passed to the static method `arrow.Array.importFromCDataInterface` to construct a MATLAB `arrow.Array` with zero copies. 
-
-The example code below is adapted from the [`test_cffi.py` test cases for PyArrow]. 
-
-###### Example Code: 
-``` matlab
-% Make a pyarrow.Array. 
->> PA = py.pyarrow.array([1, 2, 3, 4, 5]); 
-
-% Create ArrowArray and ArrowSchema C-style structs adhering to the Arrow C Data Interface format. 
->> array = py.pyarrow.cffi.ffi.new("struct ArrowArray*"); 
-
->> arrayMemoryAddress = py.int(py.pyarrow.cffi.ffi.cast("uintptr_t", array)); 
-
->> schema = py.pyarrow.cffi.ffi.new("struct ArrowSchema*"); 
-
->> schemaMemoryAddress = py.int(py.pyarrow.cffi.ffi.cast("uintptr_t", schema)); 
-
-% Export the pyarrow.Array to the C Data Interface format, populating the required ArrowArray and ArrowSchema structs. 
->> PA._export_to_c(arrayMemoryAddress, schemaMemoryAddress); 
-
-% Import the C Data Interface structs to create a MATLAB arrow.Array. 
->> AA = arrow.Array.importFromCDataInterface(arrayMemoryAddress, schemaMemoryAddress); 
-```
-
-#### Out-of-Process Memory Sharing 
-
-[MATLAB supports running Python code in a separate process]. A user could leverage the MATLAB Interface for Apache Arrow to share Arrow memory between MATLAB and PyArrow running within a separate Python process using one of the following approaches described below. 
-
-##### Memory-Mapped IPC File 
-
-For large tables used in a multi-process "data processing pipeline", a user could serialize their `arrow.Table` to the Arrow IPC File Format. This file could then be memory-mapped (zero-copy) by PyArrow running in a separate process to read the data in with minimal overhead. Because the Arrow IPC File Format is a 1:1 mapping of the in-memory Arrow format on disk, the memory-mapping is highly performant: no custom deserialization/conversion is required to construct a `pyarrow. [...]
-
-###### Example Code: 
-``` matlab
-% Create a MATLAB arrow.Table. 
->> Var1 = arrow.array(["foo", "bar", "baz"]); 
-
->> Var2 = arrow.array([today, today + 1, today + 2]); 
-
->> Var3 = arrow.array([10, 20, 30]); 
-
->> AT = arrow.Table(Var1, Var2, Var3); 
-
-% Write the MATLAB arrow.Table to the Arrow IPC File Format on disk. 
->> arrow.ipcwrite(AT, "data.arrow"); 
-
-% Run Python in a separate process. 
->> pyenv("ExecutionMode", "OutOfProcess");  
-
-% Memory map the Arrow IPC File. 
->> memoryMappedFile = py.pyarrow.memory_map("data.arrow"); 
-
-% Construct pyarrow.ipc.RecordBatchFileReader to read the Arrow IPC File. 
->> recordBatchFileReader = py.pyarrow.ipc.open_file(memoryMappedFile); 
-
-% Read all record batches from the Arrow IPC File in one-shot and return a pyarrow.Table. 
->> PAT = recordBatchFileReader.read_all() 
-```
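-
-Reading the file back into MATLAB could mirror this workflow. A sketch, assuming a hypothetical `arrow.ipcread` counterpart to `arrow.ipcwrite`: 
-
-``` matlab
-% Hypothetical counterpart to arrow.ipcwrite (name is illustrative only).
->> AT2 = arrow.ipcread("data.arrow"); 
-```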
-
-##### Plasma Object Store 
-_**Note**: Plasma is informally deprecated. It may not make sense to support it._
-
-Users could also share Arrow memory across process boundaries by using the [Plasma Object Store]. 
-
-The code examples below assume a Plasma Object Store process is already running at `/tmp/plasma`. 
-
-A MATLAB user could connect to the running Plasma Object Store process from MATLAB and share an `arrow.Array` with the ID `123`. 
-
-###### Example Code: 
-``` matlab
->> AA = arrow.array([1, 2, 3]); 
-
->> ID = 123; 
-
->> plasmaClientMATLAB = arrow.plasma.PlasmaClient("/tmp/plasma"); 
-
->> plasmaClientMATLAB.put(AA, ID); 
-
->> plasmaClientMATLAB.seal(ID); 
- ```
-
-To consume the Arrow object shared from MATLAB in another process (for example, a C++ process), a user could connect to the same Plasma Object Store process using the Arrow C++ Libraries (example code based on this [tutorial]). 
-
-###### Example Code: 
-``` c++
-#include <plasma/client.h> 
-
-using namespace plasma; 
-
-int main(int argc, char** argv) { 
-  // Start up and connect a Plasma client. 
-  PlasmaClient client; 
-
-  ARROW_CHECK_OK(client.Connect("/tmp/plasma")); 
-
-  // Get from the Plasma store by Object ID. 
-  ObjectBuffer object_buffer; 
-
-  client.Get(123, 1, -1, &object_buffer); 
-  ... 
-} 
-```
-
-## Testing 
-To ensure code quality, we would like to include the following testing infrastructure, at a minimum: 
-1. C++ APIs 
-   - GoogleTest C++ Unit Tests 
-   - Integration with CI workflows 
-2. MATLAB APIs  
-   - [MATLAB Class-Based Unit Tests] 
-   - Integration with CI workflows 
-3. [Integration Testing]
-
-## Documentation 
-To ensure usability, discoverability, and accessibility, we would like to include high quality documentation for the MATLAB Interface for Apache Arrow. 
-
-Specific areas of documentation would include: 
-1. [MATLAB Help Text] for MATLAB APIs
-2. MATLAB API reference
-3. Usage examples of MATLAB and C++ APIs
-4. README for building and installation 
-5. Build system documentation 
-6. CI integration documentation 
-
-## Installation 
-We would ideally like to make it as easy as possible for MATLAB users to install the MATLAB Interface for Apache Arrow without the need to compile [MEX] functions or perform any other manual configuration steps. 
-
-In MATLAB, users normally install optional software packages via the [Add-On Explorer]. This workflow is analogous to the way a [JavaScript user] would install the [`apache-arrow` package via the `npm` package manager] or the way a [Rust user] would install the [`arrow` crate via the `cargo` package manager]. 
-
-In the short term, in the absence of an easily installable MATLAB Add-On, we plan to maintain up-to-date, clearly explained, build and installation instructions for recent versions of MATLAB on GitHub. 
-
-In addition, we'd like to include pre-built MEX functions for Windows, Mac, and Linux that get built regularly via CI workflows. This would allow users to try out the latest functionality without having to manually build the MEX interfaces from scratch. 
-
-## Roadmap 
-The table below provides a high-level roadmap for the development of specific capabilities in the MATLAB Interface for Apache Arrow. 
-
-| Capability                       | Use Case | Timeframe |
-|----------------------------------|----------|-----------|
-| Arrow Memory Interaction         | UC1      | Near Term |
-| File Reading/Writing             | UC2      | Near Term |
-| In/Out-of-Process Memory Sharing | UC3      | Mid Term  |
-
-<!-- Links -->
-[MATLAB]: https://www.mathworks.com/products/matlab.html
-[Arrow]: https://arrow.apache.org/
-[use cases]: https://arrow.apache.org/use_cases/
-[`mxArray`]: https://www.mathworks.com/help/matlab/matlab_external/matlab-data.html
-[`missing` values]: https://www.mathworks.com/help/matlab/data_analysis/missing-data-in-matlab.html
-[MEX function]: https://www.mathworks.com/help/matlab/call-mex-file-functions.html
-[several approaches to sharing memory locally]: https://arrow.apache.org/use_cases/#sharing-memory-locally
-[MATLAB supports running Python code within the MATLAB process]: https://www.mathworks.com/help/matlab/matlab_external/create-object-from-python-class.html
-[Apache Arrow C Data Interface]: https://arrow.apache.org/docs/format/CDataInterface.html
-[`ArrowArray`]: https://arrow.apache.org/docs/format/CDataInterface.html#the-arrowarray-structure
-[`ArrowSchema`]: https://arrow.apache.org/docs/format/CDataInterface.html#the-arrowschema-structure
-[Arrow Columnar Format]: https://arrow.apache.org/docs/format/Columnar.html
-[`test_cffi.py` test cases for PyArrow]: https://github.com/apache/arrow/blob/97879eb970bac52d93d2247200b9ca7acf6f3f93/python/pyarrow/tests/test_cffi.py#L109
-[MATLAB supports running Python code in a separate process]: https://www.mathworks.com/help/matlab/matlab_external/out-of-process-execution-of-python-functionality.html
-[Plasma Object Store]: https://arrow.apache.org/docs/python/plasma.html
-[tutorial]: https://github.com/apache/arrow/blob/master/cpp/apidoc/tutorials/plasma.md#getting-an-object
-[MATLAB Class-Based Unit Tests]: https://www.mathworks.com/help/matlab/class-based-unit-tests.html
-[Integration Testing]: https://arrow.apache.org/docs/format/Integration.html
-[MATLAB Help Text]: https://www.mathworks.com/help/matlab/matlab_prog/add-help-for-your-program.html
-[MEX]: https://www.mathworks.com/help/matlab/call-mex-files-1.html
-[Add-On Explorer]: https://www.mathworks.com/help/matlab/matlab_env/get-add-ons.html
-[JavaScript user]: https://github.com/apache/arrow/tree/master/js
-[`apache-arrow` package via the `npm` package manager]: https://www.npmjs.com/package/apache-arrow
-[Rust user]: https://github.com/apache/arrow/tree/master/rust
-[`arrow` crate via the `cargo` package manager]: https://crates.io/crates/arrow
diff --git a/matlab/src/+mlarrow/+util/createMetadataStruct.m b/matlab/src/+mlarrow/+util/createMetadataStruct.m
deleted file mode 100644
index 7a23970..0000000
--- a/matlab/src/+mlarrow/+util/createMetadataStruct.m
+++ /dev/null
@@ -1,24 +0,0 @@
-function metadata = createMetadataStruct(description, numRows, numVariables)
-% CREATEMETADATASTRUCT Helper function for creating Feather MEX metadata
-% struct.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-metadata = struct('Description', description, ...
-                  'NumRows', numRows, ...
-                  'NumVariables', numVariables);
-end
-
diff --git a/matlab/src/+mlarrow/+util/createVariableStruct.m b/matlab/src/+mlarrow/+util/createVariableStruct.m
deleted file mode 100644
index 99f52d8..0000000
--- a/matlab/src/+mlarrow/+util/createVariableStruct.m
+++ /dev/null
@@ -1,24 +0,0 @@
-function variable = createVariableStruct(type, data, valid, name)
-% CREATEVARIABLESTRUCT Helper function for creating Feather MEX variable
-% struct.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-variable = struct('Type', type, ...
-                  'Data', data, ...
-                  'Valid', valid, ...
-                  'Name', name);
-end
\ No newline at end of file
diff --git a/matlab/src/+mlarrow/+util/makeValidMATLABTableVariableNames.m b/matlab/src/+mlarrow/+util/makeValidMATLABTableVariableNames.m
deleted file mode 100644
index ba5e072..0000000
--- a/matlab/src/+mlarrow/+util/makeValidMATLABTableVariableNames.m
+++ /dev/null
@@ -1,42 +0,0 @@
-function [variableNames, variableDescriptions] = makeValidMATLABTableVariableNames(columnNames)
-% makeValidMATLABTableVariableNames Makes valid MATLAB table variable names
-% from a set of Feather table column names.
-% 
-% [variableNames, variableDescriptions] = makeValidMATLABTableVariableNames(columnNames)
-% Modifies the input Feather table columnNames to be valid MATLAB table
-% variable names if they are not already. If any of the Feather table columnNames
-% are invalid MATLAB table variable names, then the original columnNames are returned
-% in variableDescriptions to be stored in the table.Properties.VariableDescriptions
-% property.
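-%
-% Example (illustrative):
-%   [names, descriptions] = makeValidMATLABTableVariableNames(["Weight", "2nd Column"]);
-%   % "2nd Column" is not a valid MATLAB identifier, so names(2) holds the
-%   % matlab.lang.makeValidName normalization and descriptions(2) records
-%   % the original column name.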
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-    variableNames = string(columnNames);
-    variableDescriptions = strings(0, 0);
-    
-    validVariableNames = false(1, length(variableNames));
-    for ii = 1:length(variableNames)
-        validVariableNames(ii) = isvarname(variableNames(ii));
-    end
-    
-    if ~all(validVariableNames)
-        variableDescriptions = strings(1, length(columnNames));
-        variableDescriptions(validVariableNames) = "";
-        variableDescriptions(~validVariableNames) = compose("Original variable name: '%s'", ...
-                                                          variableNames(~validVariableNames));
-        variableNames(~validVariableNames) = matlab.lang.makeValidName(variableNames(~validVariableNames));
-    end
-end
diff --git a/matlab/src/+mlarrow/+util/table2mlarrow.m b/matlab/src/+mlarrow/+util/table2mlarrow.m
deleted file mode 100644
index 3103724..0000000
--- a/matlab/src/+mlarrow/+util/table2mlarrow.m
+++ /dev/null
@@ -1,83 +0,0 @@
-function [variables, metadata] = table2mlarrow(t)
-%TABLE2MLARROW Converts a MATLAB table into a form
-%   suitable for passing to the mlarrow C++ MEX layer.
-%
-%   [VARIABLES, METADATA] = TABLE2MLARROW(T)
-%   Takes a MATLAB table T and returns struct array equivalents
-%   which are suitable for passing to the mlarrow C++ MEX layer.
-%
-%   VARIABLES is a 1xN struct array representing the table variables.
-%
-%   VARIABLES contains the following fields:
-%
-%   Field Name     Class        Description
-%   ------------   -------      ----------------------------------------------
-%   Name           char         Variable's name
-%   Type           char         Variable's MATLAB datatype
-%   Data           numeric      Variable's data
-%   Valid          logical      0 = invalid (null), 1 = valid (non-null) value
-%
-%   METADATA is a 1x1 struct array containing the following fields:
-%
-%   Field Name    Class         Description
-%   ------------  -------       ----------------------------------------------
-%   Description   char          Table description (T.Properties.Description)
-%   NumRows       double        Number of table rows (height(T))
-%   NumVariables  double        Number of table variables (width(T))
-%
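-%   Example (illustrative):
-%
-%       T = table([10; 20], [1.5; 2.5]);
-%       [variables, metadata] = mlarrow.util.table2mlarrow(T);
-%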
-%   See also FEATHERREAD, FEATHERWRITE.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-import mlarrow.util.*;
-
-% Struct array representing the underlying data of each variable
-% in the given table.
-variables = repmat(createVariableStruct('', [], [], ''), 1, width(t));
-
-% Struct representing table-level metadata.
-metadata = createMetadataStruct(t.Properties.Description, height(t), width(t));
-
-% Iterate over each variable in the given table,
-% extracting the underlying array data.
-for ii = 1:width(t)
-    data = t.(ii);
-    % Multi-column table variables are unsupported.
-    if ~isvector(data)
-        error('MATLAB:arrow:MultiColumnVariablesUnsupported', ...
-              'Multi-column table variables are unsupported by featherwrite.');
-    end
-    % Get the datatype of the current variable's underlying array.
-    variables(ii).Type = class(data);
-    % Break the datatype down into its constituent components, if appropriate.
-    switch variables(ii).Type
-        % For numeric variables, the underlying array data can
-        % be passed to the C++ layer directly.
-        case {'uint8', 'uint16', 'uint32', 'uint64', ...
-              'int8', 'int16', 'int32', 'int64', ...
-              'single', 'double'}
-            variables(ii).Data = data;
-        otherwise
-            error('MATLAB:arrow:UnsupportedVariableType', ...
-                 ['Type ' variables(ii).Type ' is unsupported by featherwrite.']);
-    end
-    variables(ii).Valid = ~ismissing(data);
-    variables(ii).Name = t.Properties.VariableNames{ii};
-end
-
-end
diff --git a/matlab/src/feather_reader.cc b/matlab/src/feather_reader.cc
deleted file mode 100644
index 484c300..0000000
--- a/matlab/src/feather_reader.cc
+++ /dev/null
@@ -1,267 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#include <algorithm>
-#include <cmath>
-
-#include <arrow/io/file.h>
-#include <arrow/ipc/feather.h>
-#include <arrow/status.h>
-#include <arrow/table.h>
-#include <arrow/type.h>
-#include <arrow/util/bit-util.h>
-
-#include <mex.h>
-
-#include "feather_reader.h"
-#include "matlab_traits.h"
-#include "util/handle_status.h"
-#include "util/unicode_conversion.h"
-
-namespace arrow {
-namespace matlab {
-namespace internal {
-
-// Read the name of variable i from the Feather file as an mxArray*.
-mxArray* ReadVariableName(const std::string& column_name) {
-  return matlab::util::ConvertUTF8StringToUTF16CharMatrix(column_name);
-}
-
-template <typename ArrowDataType>
-mxArray* ReadNumericVariableData(const std::shared_ptr<Array>& column) {
-  using MatlabType = typename MatlabTraits<ArrowDataType>::MatlabType;
-  using ArrowArrayType = typename TypeTraits<ArrowDataType>::ArrayType;
-
-  const mxClassID matlab_class_id = MatlabTraits<ArrowDataType>::matlab_class_id;
-  // Allocate a numeric mxArray* with the correct mxClassID based on the type of the
-  // arrow::Array.
-  mxArray* variable_data =
-      mxCreateNumericMatrix(column->length(), 1, matlab_class_id, mxREAL);
-
-  std::shared_ptr<ArrowArrayType> integer_array =
-      std::static_pointer_cast<ArrowArrayType>(column);
-
-  // Get a raw pointer to the Arrow array data.
-  const MatlabType* source = integer_array->raw_values();
-
-  // Get a mutable pointer to the MATLAB array data and std::copy the
-  // Arrow array data into it.
-  MatlabType* destination = MatlabTraits<ArrowDataType>::GetData(variable_data);
-  std::copy(source, source + column->length(), destination);
-
-  return variable_data;
-}
-
-// Read the data of variable i from the Feather file as an mxArray*.
-mxArray* ReadVariableData(const std::shared_ptr<Array>& column,
-                          const std::string& column_name) {
-  std::shared_ptr<DataType> type = column->type();
-
-  switch (type->id()) {
-    case Type::FLOAT:
-      return ReadNumericVariableData<FloatType>(column);
-    case Type::DOUBLE:
-      return ReadNumericVariableData<DoubleType>(column);
-    case Type::UINT8:
-      return ReadNumericVariableData<UInt8Type>(column);
-    case Type::UINT16:
-      return ReadNumericVariableData<UInt16Type>(column);
-    case Type::UINT32:
-      return ReadNumericVariableData<UInt32Type>(column);
-    case Type::UINT64:
-      return ReadNumericVariableData<UInt64Type>(column);
-    case Type::INT8:
-      return ReadNumericVariableData<Int8Type>(column);
-    case Type::INT16:
-      return ReadNumericVariableData<Int16Type>(column);
-    case Type::INT32:
-      return ReadNumericVariableData<Int32Type>(column);
-    case Type::INT64:
-      return ReadNumericVariableData<Int64Type>(column);
-    default: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:UnsupportedArrowType",
-                        "Unsupported arrow::Type '%s' for variable '%s'",
-                        type->name().c_str(), column_name.c_str());
-      break;
-    }
-  }
-
-  return nullptr;
-}
-
-// arrow::Buffers are bit-packed, while mxLogical arrays aren't. This utility
-// uses an Arrow utility to copy each bit of an arrow::Buffer into each byte
-// of an mxLogical array.
-void BitUnpackBuffer(const std::shared_ptr<Buffer>& source, int64_t length,
-                     mxLogical* destination) {
-  const uint8_t* source_data = source->data();
-
-  // Call into an Arrow utility to visit each bit in the bitmap.
-  auto visitFcn = [&](mxLogical is_valid) { *destination++ = is_valid; };
-
-  const int64_t start_offset = 0;
-  arrow::internal::VisitBitsUnrolled(source_data, start_offset, length, visitFcn);
-}
-
-// Populates the validity bitmap from an arrow::Array.
-// writes to a zero-initialized destination buffer.
-// Implements a fast path for the fully-valid and fully-invalid cases.
-// Returns true if the destination buffer was successfully populated.
-bool TryBitUnpackFastPath(const std::shared_ptr<Array>& array,
-                          mxLogical* destination) {
-  const int64_t null_count = array->null_count();
-  const int64_t length = array->length();
-
-  if (null_count == length) {
-    // The source array is filled with invalid values. Since mxCreateLogicalMatrix
-    // zero-initializes the destination buffer, we can return without changing anything
-    // in the destination buffer.
-    return true;
-  } else if (null_count == 0) {
-    // The source array contains only valid values. Fill the destination buffer
-    // with 'true'.
-    std::fill(destination, destination + length, true);
-    return true;
-  }
-
-  // Return false to indicate that we couldn't fill the entire validity bitmap.
-  return false;
-}
-
-// Read the validity (null) bitmap of variable i from the Feather
-// file as an mxArray*.
-mxArray* ReadVariableValidityBitmap(const std::shared_ptr<Array>& column) {
-  // Allocate an mxLogical array to store the validity (null) bitmap values.
-  // Note: All Arrow arrays can have an associated validity (null) bitmap.
-  // The Apache Arrow specification defines 0 (false) to represent an
-  // invalid (null) array entry and 1 (true) to represent a valid
-  // (non-null) array entry.
-  mxArray* validity_bitmap = mxCreateLogicalMatrix(column->length(), 1);
-  mxLogical* validity_bitmap_unpacked = mxGetLogicals(validity_bitmap);
-
-  if (!TryBitUnpackFastPath(column, validity_bitmap_unpacked)) {
-    // Couldn't fill the full validity bitmap at once. Call an optimized loop-unrolled
-    // implementation instead that goes byte-by-byte and populates the validity bitmap.
-    BitUnpackBuffer(column->null_bitmap(), column->length(), validity_bitmap_unpacked);
-  }
-
-  return validity_bitmap;
-}
-
-// Read the type name of an arrow::Array as an mxChar array.
-mxArray* ReadVariableType(const std::shared_ptr<Array>& column) {
-  return util::ConvertUTF8StringToUTF16CharMatrix(column->type()->name());
-}
-
-// MATLAB arrays cannot be larger than 2^48 elements.
-static constexpr uint64_t MAX_MATLAB_SIZE = static_cast<uint64_t>(0x01) << 48;
-
-}  // namespace internal
-
-Status FeatherReader::Open(const std::string& filename,
-                           std::shared_ptr<FeatherReader>* feather_reader) {
-  *feather_reader = std::shared_ptr<FeatherReader>(new FeatherReader());
-
-  // Open file with given filename as a ReadableFile.
-  std::shared_ptr<io::ReadableFile> readable_file(nullptr);
-
-  RETURN_NOT_OK(io::ReadableFile::Open(filename, &readable_file));
-
-  // TableReader expects a RandomAccessFile.
-  std::shared_ptr<io::RandomAccessFile> random_access_file(readable_file);
-
-  // Open the Feather file for reading with a TableReader.
-  RETURN_NOT_OK(ipc::feather::TableReader::Open(random_access_file,
-                                                &(*feather_reader)->table_reader_));
-
-  // Read the table metadata from the Feather file.
-  (*feather_reader)->num_rows_ = (*feather_reader)->table_reader_->num_rows();
-  (*feather_reader)->num_variables_ = (*feather_reader)->table_reader_->num_columns();
-  (*feather_reader)->description_ =
-      (*feather_reader)->table_reader_->HasDescription()
-          ? (*feather_reader)->table_reader_->GetDescription()
-          : "";
-
-  if ((*feather_reader)->num_rows_ > internal::MAX_MATLAB_SIZE ||
-      (*feather_reader)->num_variables_ > internal::MAX_MATLAB_SIZE) {
-    mexErrMsgIdAndTxt("MATLAB:arrow:SizeTooLarge",
-                      "The table size exceeds MATLAB limits: %u x %u",
-                      (*feather_reader)->num_rows_, (*feather_reader)->num_variables_);
-  }
-
-  return Status::OK();
-}
-
-// Read the table metadata from the Feather file as an mxArray*.
-mxArray* FeatherReader::ReadMetadata() const {
-  const int32_t num_metadata_fields = 3;
-  const char* fieldnames[] = {"NumRows", "NumVariables", "Description"};
-
-  // Create an mxArray struct array containing the table metadata to be passed back to
-  // MATLAB.
-  mxArray* metadata = mxCreateStructMatrix(1, 1, num_metadata_fields, fieldnames);
-
-  // Returning double values to MATLAB since that is the default type.
-
-  // Set the number of rows.
-  mxSetField(metadata, 0, "NumRows",
-             mxCreateDoubleScalar(static_cast<double>(num_rows_)));
-
-  // Set the number of variables.
-  mxSetField(metadata, 0, "NumVariables",
-             mxCreateDoubleScalar(static_cast<double>(num_variables_)));
-
-  // Set the description.
-  mxSetField(metadata, 0, "Description",
-             util::ConvertUTF8StringToUTF16CharMatrix(description_));
-
-  return metadata;
-}
-
-// Read the table variables from the Feather file as an mxArray*.
-mxArray* FeatherReader::ReadVariables() const {
-  const int32_t num_variable_fields = 4;
-  const char* fieldnames[] = {"Name", "Type", "Data", "Valid"};
-
-  // Create an mxArray* struct array containing the table variables to be passed back to
-  // MATLAB.
-  mxArray* variables =
-      mxCreateStructMatrix(1, num_variables_, num_variable_fields, fieldnames);
-
-  // Read all the table variables in the Feather file into memory.
-  for (int64_t i = 0; i < num_variables_; ++i) {
-    std::shared_ptr<ChunkedArray> column;
-    util::HandleStatus(table_reader_->GetColumn(i, &column));
-    if (column->num_chunks() != 1) {
-      mexErrMsgIdAndTxt("MATLAB:arrow:FeatherReader::ReadVariables",
-                        "Chunked columns not yet supported");
-    }
-    std::shared_ptr<Array> chunk = column->chunk(0);
-    const std::string column_name = table_reader_->GetColumnName(i);
-
-    // Set the struct field data.
-    mxSetField(variables, i, "Name", internal::ReadVariableName(column_name));
-    mxSetField(variables, i, "Type", internal::ReadVariableType(chunk));
-    mxSetField(variables, i, "Data", internal::ReadVariableData(chunk, column_name));
-    mxSetField(variables, i, "Valid", internal::ReadVariableValidityBitmap(chunk));
-  }
-
-  return variables;
-}
-
-}  // namespace matlab
-}  // namespace arrow
diff --git a/matlab/src/feather_reader.h b/matlab/src/feather_reader.h
deleted file mode 100644
index 00fea68..0000000
--- a/matlab/src/feather_reader.h
+++ /dev/null
@@ -1,77 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#pragma once
-
-#include <memory>
-#include <string>
-
-#include <arrow/ipc/feather.h>
-#include <arrow/status.h>
-#include <arrow/type.h>
-
-#include <matrix.h>
-
-namespace arrow {
-namespace matlab {
-
-class FeatherReader {
- public:
-  ~FeatherReader() = default;
-
-  /// \brief Read the table metadata as an mxArray* struct from the given
-  ///        Feather file.
-  ///        The returned mxArray* struct contains the following fields:
-  ///         - "Description"  :: Nx1 mxChar array, table-level description
-  ///         - "NumRows"      :: scalar mxDouble array, number of rows in the
-  ///                             table
-  ///         - "NumVariables" :: scalar mxDouble array, number of variables in
-  ///                             the table
-  ///        Clients are responsible for freeing the returned mxArray memory
-  ///        when it is no longer needed, or passing it to MATLAB to be managed.
-  /// \return metadata mxArray* scalar struct containing table level metadata
-  mxArray* ReadMetadata() const;
-
-  /// \brief Read the table variable data as an mxArray* struct array from the
-  ///        given Feather file.
-  ///        The returned mxArray* struct array has the following fields:
-  ///         - "Name"  :: Nx1 mxChar array, name of the variable
-  ///         - "Type"  :: Nx1 mxChar array, the variable's Arrow datatype
-  ///         - "Data"  :: Nx1 mxArray, data for the variable
-  ///         - "Valid" :: Nx1 mxLogical array, validity (null) bitmap
-  ///        Clients are responsible for freeing the returned mxArray memory
-  ///        when it is no longer needed, or passing it to MATLAB to be managed.
-  /// \return variables mxArray* struct array containing table variable data
-  mxArray* ReadVariables() const;
-
-  /// \brief Initialize a FeatherReader object from a given Feather file.
-  /// \param[in] filename path to a Feather file
-  /// \param[out] feather_reader uninitialized FeatherReader object
-  static Status Open(const std::string& filename,
-                     std::shared_ptr<FeatherReader>* feather_reader);
-
- private:
-  FeatherReader() = default;
-  std::unique_ptr<ipc::feather::TableReader> table_reader_;
-  int64_t num_rows_;
-  int64_t num_variables_;
-  std::string description_;
-};
-
-}  // namespace matlab
-}  // namespace arrow
-
diff --git a/matlab/src/feather_writer.cc b/matlab/src/feather_writer.cc
deleted file mode 100644
index bd1576b..0000000
--- a/matlab/src/feather_writer.cc
+++ /dev/null
@@ -1,338 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#include <cmath>
-#include <functional> /* for std::multiplies */
-#include <numeric>    /* for std::accumulate */
-
-#include <arrow/array.h>
-#include <arrow/buffer.h>
-#include <arrow/io/file.h>
-#include <arrow/ipc/feather.h>
-#include <arrow/status.h>
-#include <arrow/table.h>
-#include <arrow/type.h>
-#include <arrow/util/bit-util.h>
-
-#include <mex.h>
-
-#include "feather_writer.h"
-#include "matlab_traits.h"
-#include "util/handle_status.h"
-
-namespace arrow {
-namespace matlab {
-namespace internal {
-
-// Utility that helps verify the input mxArray struct field name and type.
-// Returns void since any errors will throw and terminate MEX execution.
-void ValidateMxStructField(const mxArray* struct_array, const char* fieldname,
-                           mxClassID expected_class_id, bool can_be_empty) {
-  // Check that the input mxArray is a struct array.
-  if (!mxIsStruct(struct_array)) {
-    mexErrMsgIdAndTxt("MATLAB:arrow:IncorrectDimensionsOrType",
-                      "Input needs to be a struct array");
-  }
-
-  // Return early if an empty table is provided as input.
-  if (mxIsEmpty(struct_array)) {
-    return;
-  }
-
-  mxArray* field = mxGetField(struct_array, 0, fieldname);
-
-  if (!field) {
-    mexErrMsgIdAndTxt("MATLAB:arrow:MissingStructField",
-                      "Missing field '%s' in input struct array", fieldname);
-  }
-
-  mxClassID actual_class_id = mxGetClassID(field);
-
-  // Avoid type check if an mxUNKNOWN_CLASS is provided since the UNKNOWN type is used to
-  // signify genericity in the input type.
-  if (expected_class_id != mxUNKNOWN_CLASS) {
-    if (expected_class_id != actual_class_id) {
-      mexErrMsgIdAndTxt("MATLAB:arrow:MissingStructField",
-                        "Incorrect type '%s' for struct array field '%s'",
-                        mxGetClassName(field), fieldname);
-    }
-  }
-
-  // Some struct fields (like the table description) can be empty, while others 
-  // (like NumRows) should never be empty. This conditional helps account for both cases.
-  if (!can_be_empty) {
-    // Ensure that individual mxStructArray fields are non-empty.
-    // We can call mxGetData after this without needing another null check.
-    if (mxIsEmpty(field)) {
-      mexErrMsgIdAndTxt("MATLAB:arrow:EmptyStructField",
-                        "Struct array field '%s' cannot be empty", fieldname);
-    }
-  }
-}
-
-// Utility function to convert mxChar mxArray* to std::string while preserving
-// Unicode code points.
-std::string MxArrayToString(const mxArray* array) {
-  // Return empty std::string if a mxChar array is not passed in.
-  if (!mxIsChar(array)) {
-    return std::string();
-  }
-
-  // Convert mxArray first to a C-style char array, then copy into a std::string.
-  char* utf8_array = mxArrayToUTF8String(array);
-  std::string output(utf8_array);
-
-  // Free the allocated char* from the MEX runtime.
-  mxFree(utf8_array);
-
-  return output;
-}
-
-// Compare number of columns and exit out to the MATLAB layer if incorrect.
-void ValidateNumColumns(int64_t actual, int64_t expected) {
-  if (actual != expected) {
-    mexErrMsgIdAndTxt("MATLAB:arrow:IncorrectNumberOfColumns",
-                      "Received only '%d' columns but expected '%d' columns", actual,
-                      expected);
-  }
-}
-
-// Compare number of rows and exit out to the MATLAB layer if incorrect.
-void ValidateNumRows(int64_t actual, int64_t expected) {
-  if (actual != expected) {
-    mexErrMsgIdAndTxt("MATLAB:arrow:IncorrectNumberOfRows",
-                      "Received only '%d' rows but expected '%d' rows", actual, expected);
-  }
-}
-
-// Calculate the number of bytes required in the bit-packed validity buffer.
-constexpr int64_t BitPackedLength(int64_t num_elements) {
-  // Since an mxLogical array encodes [0, 1] in a full byte, each byte can be
-  // compressed down to a single bit, so the mxLogical array length is divided by 8 here.
-  return static_cast<int64_t>(std::ceil(num_elements / 8.0));
-}
-
-// Calculate the total number of elements in an mxArray.
-// We have to do this separately since mxGetNumberOfElements only works on numeric arrays.
-size_t GetNumberOfElements(const mxArray* array) {
-  // Get the dimensions and the total number of dimensions from the mxArray*.
-  const size_t num_dimensions = mxGetNumberOfDimensions(array);
-  const size_t* dimensions = mxGetDimensions(array);
-
-  // Iterate over the dimensions array and accumulate the total number of elements.
-  return std::accumulate(dimensions, dimensions + num_dimensions, 1,
-                         std::multiplies<size_t>());
-}
-
-// Write an mxLogicalArray* into a bit-packed arrow::MutableBuffer
-void BitPackBuffer(const mxArray* logical_array,
-                   std::shared_ptr<MutableBuffer> packed_buffer) {
-  // Error out if the incorrect type is passed in.
-  if (!mxIsLogical(logical_array)) {
-    mexErrMsgIdAndTxt(
-        "MATLAB:arrow:IncorrectType",
-        "Expected mxLogical array as input but received mxArray of class '%s'",
-        mxGetClassName(logical_array));
-  }
-
-  // Validate that the input arrow::Buffer has sufficient size to store a full bit-packed
-  // representation of the input mxLogicalArray
-  int64_t unpacked_buffer_length = GetNumberOfElements(logical_array);
-  if (BitPackedLength(unpacked_buffer_length) > packed_buffer->capacity()) {
-    mexErrMsgIdAndTxt("MATLAB:arrow:BufferSizeExceeded",
-                      "Buffer of size %d bytes cannot store %d bytes of data",
-                      packed_buffer->capacity(), BitPackedLength(unpacked_buffer_length));
-  }
-
-  // Get pointers to the internal uint8_t arrays behind arrow::Buffer and mxArray
-  uint8_t* packed_buffer_ptr = packed_buffer->mutable_data();
-  mxLogical* unpacked_buffer_ptr = mxGetLogicals(logical_array);
-
-  // Iterate over the mxLogical array and write bit-packed bools to the arrow::Buffer.
-  // Call into a loop-unrolled Arrow utility for better performance when bit-packing.
-  auto generator = [&]() -> uint8_t { return *unpacked_buffer_ptr++; };
-  const int64_t start_offset = 0;
-  arrow::internal::GenerateBitsUnrolled(packed_buffer_ptr, start_offset,
-                                        unpacked_buffer_length, generator);
-}
-
-// Write numeric datatypes to the Feather file.
-template <typename ArrowDataType>
-std::unique_ptr<Array> WriteNumericData(const mxArray* data,
-                                        const std::shared_ptr<Buffer> validity_bitmap) {
-  // Alias the type name for the underlying MATLAB type.
-  using MatlabType = typename MatlabTraits<ArrowDataType>::MatlabType;
-
-  // Get a pointer to the underlying mxArray data.
-  // We need to (temporarily) cast away const here since the mxGet* functions do not
-  // accept a const input parameter for compatibility reasons.
-  const MatlabType* dt = MatlabTraits<ArrowDataType>::GetData(const_cast<mxArray*>(data));
-
-  // Construct an arrow::Buffer that points to the underlying mxArray without copying.
-  // - The lifetime of the mxArray buffer exceeds that of the arrow::Buffer here since
-  //   MATLAB should only free this region on garbage-collection after the MEX function
-  //   is executed. Therefore it is safe for arrow::Buffer to point to this location.
-  // - However arrow::Buffer must not free this region by itself, since that could cause
-  //   segfaults if the input array is used later in MATLAB.
-  //   - The Doxygen doc for arrow::Buffer's constructor implies that it is not an RAII
-  //     type, so this should be safe from possible double-free here.
-  std::shared_ptr<Buffer> buffer =
-      std::make_shared<Buffer>(reinterpret_cast<const uint8_t*>(dt),
-                               mxGetElementSize(data) * mxGetNumberOfElements(data));
-
-  // Construct arrow::NumericArray specialization using arrow::Buffer.
-  // Pass in null count information. We could compute and provide the number of
-  // nulls here, but we pass -1 for now so that Arrow recomputes it if necessary.
-  return std::unique_ptr<Array>(new NumericArray<ArrowDataType>(
-      mxGetNumberOfElements(data), buffer, validity_bitmap, -1));
-}
-
-// Dispatch MATLAB column data to the correct arrow::Array converter.
-std::unique_ptr<Array> WriteVariableData(const mxArray* data, const std::string& type,
-                                         const std::shared_ptr<Buffer> validity_bitmap) {
-  // Get the underlying type of the mxArray data.
-  const mxClassID mxclass = mxGetClassID(data);
-
-  switch (mxclass) {
-    case mxSINGLE_CLASS:
-      return WriteNumericData<FloatType>(data, validity_bitmap);
-    case mxDOUBLE_CLASS:
-      return WriteNumericData<DoubleType>(data, validity_bitmap);
-    case mxUINT8_CLASS:
-      return WriteNumericData<UInt8Type>(data, validity_bitmap);
-    case mxUINT16_CLASS:
-      return WriteNumericData<UInt16Type>(data, validity_bitmap);
-    case mxUINT32_CLASS:
-      return WriteNumericData<UInt32Type>(data, validity_bitmap);
-    case mxUINT64_CLASS:
-      return WriteNumericData<UInt64Type>(data, validity_bitmap);
-    case mxINT8_CLASS:
-      return WriteNumericData<Int8Type>(data, validity_bitmap);
-    case mxINT16_CLASS:
-      return WriteNumericData<Int16Type>(data, validity_bitmap);
-    case mxINT32_CLASS:
-      return WriteNumericData<Int32Type>(data, validity_bitmap);
-    case mxINT64_CLASS:
-      return WriteNumericData<Int64Type>(data, validity_bitmap);
-
-    default: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:UnsupportedArrowType",
-                        "Unsupported MATLAB class '%s' for variable of type '%s'",
-                        mxGetClassName(data), type.c_str());
-    }
-  }
-
-  // We shouldn't ever reach this branch, but if we do, return nullptr.
-  return nullptr;
-}
-
-}  // namespace internal
-
-Status FeatherWriter::Open(const std::string& filename,
-                           std::shared_ptr<FeatherWriter>* feather_writer) {
-  // Allocate shared_ptr out parameter.
-  *feather_writer = std::shared_ptr<FeatherWriter>(new FeatherWriter());
-
-  // Open a FileOutputStream corresponding to the provided filename.
-  std::shared_ptr<io::OutputStream> writable_file(nullptr);
-  ARROW_RETURN_NOT_OK(io::FileOutputStream::Open(filename, &writable_file));
-
-  // TableWriter::Open expects a shared_ptr to an OutputStream.
-  // Open the Feather file for writing with a TableWriter.
-  return ipc::feather::TableWriter::Open(writable_file,
-                                         &(*feather_writer)->table_writer_);
-}
-
-// Write table metadata to the Feather file from a mxArray*.
-void FeatherWriter::WriteMetadata(const mxArray* metadata) {
-  // Verify that all required fieldnames are provided.
-  internal::ValidateMxStructField(metadata, "Description", mxCHAR_CLASS, true);
-  internal::ValidateMxStructField(metadata, "NumRows", mxDOUBLE_CLASS, false);
-  internal::ValidateMxStructField(metadata, "NumVariables", mxDOUBLE_CLASS, false);
-
-  // Convert Description to a std::string and set on FeatherWriter and TableWriter.
-  std::string description =
-      internal::MxArrayToString(mxGetField(metadata, 0, "Description"));
-  this->description_ = description;
-  this->table_writer_->SetDescription(description);
-
-  // Get the NumRows field in the struct array and set on TableWriter.
-  this->num_rows_ = static_cast<int64_t>(mxGetScalar(mxGetField(metadata, 0, "NumRows")));
-  this->table_writer_->SetNumRows(this->num_rows_);
-
-  // Get the total number of variables. This is checked later for consistency with
-  // the provided number of columns before finishing the file write.
-  this->num_variables_ =
-      static_cast<int64_t>(mxGetScalar(mxGetField(metadata, 0, "NumVariables")));
-}
-
-// Write mxArrays from MATLAB into a Feather file.
-Status FeatherWriter::WriteVariables(const mxArray* variables) {
-  // Verify that all required fieldnames are provided.
-  internal::ValidateMxStructField(variables, "Name", mxCHAR_CLASS, true);
-  internal::ValidateMxStructField(variables, "Type", mxCHAR_CLASS, false);
-  internal::ValidateMxStructField(variables, "Data", mxUNKNOWN_CLASS, true);
-  internal::ValidateMxStructField(variables, "Valid", mxLOGICAL_CLASS, true);
-
-  // Get the number of columns in the struct array.
-  size_t num_columns = internal::GetNumberOfElements(variables);
-
-  // Verify that all of the columns required for writing were provided.
-  // Currently, WriteVariables expects every column to be passed in a single call.
-  internal::ValidateNumColumns(static_cast<int64_t>(num_columns), this->num_variables_);
-
-  // Allocate a packed validity bitmap for later arrow::Buffers to reference and populate.
-  // Since this is declared in the scope enclosing all arrow::Buffer usage below, it
-  // outlives any arrow::Buffers created in this loop, thus avoiding dangling
-  // references.
-  std::shared_ptr<ResizableBuffer> validity_bitmap;
-  ARROW_RETURN_NOT_OK(AllocateResizableBuffer(internal::BitPackedLength(this->num_rows_),
-                                              &validity_bitmap));
-
-  // Iterate over the input columns and generate arrow arrays.
-  for (size_t idx = 0; idx < num_columns; ++idx) {
-    // Unwrap constituent mxArray*s from the mxStructArray*. This is safe since we
-    // already checked for existence and non-nullness of these types.
-    const mxArray* name = mxGetField(variables, idx, "Name");
-    const mxArray* data = mxGetField(variables, idx, "Data");
-    const mxArray* type = mxGetField(variables, idx, "Type");
-    const mxArray* valid = mxGetField(variables, idx, "Valid");
-
-    // Convert column and type name to a std::string from mxArray*.
-    std::string name_str = internal::MxArrayToString(name);
-    std::string type_str = internal::MxArrayToString(type);
-
-    // Populate bit-packed arrow::Buffer using validity data in the mxArray*.
-    internal::BitPackBuffer(valid, validity_bitmap);
-
-    // Wrap mxArray data in an arrow::Array of the equivalent type.
-    std::unique_ptr<Array> array =
-        internal::WriteVariableData(data, type_str, validity_bitmap);
-
-    // Verify that the arrow::Array has the right number of elements.
-    internal::ValidateNumRows(array->length(), this->num_rows_);
-
-    // Write another column to the Feather file.
-    ARROW_RETURN_NOT_OK(this->table_writer_->Append(name_str, *array));
-  }
-
-  // Write the Feather file metadata to the end of the file.
-  return this->table_writer_->Finalize();
-}
-
-}  // namespace matlab
-}  // namespace arrow
diff --git a/matlab/src/feather_writer.h b/matlab/src/feather_writer.h
deleted file mode 100644
index 4b402e0..0000000
--- a/matlab/src/feather_writer.h
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#pragma once
-
-#include <memory>
-#include <string>
-
-#include <arrow/ipc/feather.h>
-#include <arrow/status.h>
-#include <arrow/type.h>
-
-#include <matrix.h>
-
-namespace arrow {
-namespace matlab {
-
-class FeatherWriter {
- public:
-  ~FeatherWriter() = default;
-
-  /// \brief Write Feather file metadata using information from an mxArray* struct.
-  ///        The input mxArray must be a scalar struct array with the following fields:
-  ///         - "Description" :: Nx1 mxChar array, table-level description
-  ///         - "NumRows" :: scalar mxDouble array, number of rows in table
-  ///         - "NumVariables" :: scalar mxDouble array, total number of variables
-  /// \param[in] metadata mxArray* scalar struct containing table-level metadata
-  void WriteMetadata(const mxArray* metadata);
-
-  /// \brief Write mxArrays to a Feather file. The input must be an N-by-1 mxStruct
-  ///        array with the following fields:
-  ///         - "Name" :: Nx1 mxChar array, name of the column
-  ///         - "Type" :: Nx1 mxChar array, the variable's MATLAB datatype
-  ///         - "Data" :: Nx1 mxArray, data for this variable
-  ///         - "Valid" :: Nx1 mxLogical array, 0 represents invalid (null) values and
-  ///                                           1 represents valid (non-null) values
-  /// \param[in] variables mxArray* struct array containing table variable data
-  /// \return status
-  Status WriteVariables(const mxArray* variables);
-
-  /// \brief Initialize a FeatherWriter object that writes to a Feather file
-  /// \param[in] filename path to the new Feather file
-  /// \param[out] feather_writer uninitialized FeatherWriter object
-  /// \return status
-  static Status Open(const std::string& filename,
-                     std::shared_ptr<FeatherWriter>* feather_writer);
-
- private:
-  FeatherWriter() = default;
-
-  std::unique_ptr<ipc::feather::TableWriter> table_writer_;
-  int64_t num_rows_;
-  int64_t num_variables_;
-  std::string description_;
-};
-
-}  // namespace matlab
-}  // namespace arrow
-
diff --git a/matlab/src/featherread.m b/matlab/src/featherread.m
deleted file mode 100644
index 4ac8a56..0000000
--- a/matlab/src/featherread.m
+++ /dev/null
@@ -1,90 +0,0 @@
-function t = featherread(filename)
-%FEATHERREAD Create a table by reading from a Feather file.
-%   Use the FEATHERREAD function to create a table by reading
-%   column-oriented data from a Feather file.
-%
-%   T = FEATHERREAD(FILENAME) creates a table by reading from the Feather
-%   file FILENAME.
-
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-import mlarrow.util.*;
-
-% Validate input arguments.
-narginchk(1, 1);
-filename = convertStringsToChars(filename);
-if ~ischar(filename)
-    error('MATLAB:arrow:InvalidFilenameDatatype', ...
-        'Filename must be a character vector or string scalar.');
-end
-
-% FOPEN can be used to search for files without an extension on the MATLAB
-% path.
-fid = fopen(filename);
-if fid ~= -1
-    filename = fopen(fid);
-    fclose(fid);
-else
-    error('MATLAB:arrow:UnableToOpenFile', ...
-        'Unable to open file %s.', filename);
-end
-
-% Read table variables and metadata from the given Feather file using
-% libarrow.
-[variables, metadata] = featherreadmex(filename);
-
-% Make valid MATLAB table variable names out of any of the
-% Feather table column names that are not valid MATLAB table
-% variable names.
-[variableNames, variableDescriptions] = makeValidMATLABTableVariableNames({variables.Name});
-
-% Iterate over each table variable, handling invalid (null) entries
-% and invalid MATLAB table variable names appropriately.
-% Note: All Arrow arrays can have an associated validity (null) bitmap.
-% The Apache Arrow specification defines 0 (false) to represent an
-% invalid (null) array entry and 1 (true) to represent a valid
-% (non-null) array entry.
-for ii = 1:length(variables)
-    if ~all(variables(ii).Valid)
-        switch variables(ii).Type
-            case {'uint8', 'uint16', 'uint32', 'uint64', 'int8', 'int16', 'int32', 'int64'}
-                % MATLAB does not support missing values for integer types, so
-                % cast to double and set missing values to NaN in this case.
-                variables(ii).Data = double(variables(ii).Data);
-        end
-
-        % Set invalid (null) entries to the appropriate MATLAB missing value using
-        % logical indexing.
-        variables(ii).Data(~variables(ii).Valid) = missing;
-    end
-end
-
-% Construct a MATLAB table from the Feather file data.
-t = table(variables.Data, 'VariableNames', cellstr(variableNames));
-
-% Store original Feather table column names in the table.Properties.VariableDescriptions
-% property if they were modified to be valid MATLAB table variable names.
-if ~isempty(variableDescriptions)
-    t.Properties.VariableDescriptions = cellstr(variableDescriptions);
-end
-
-% Set the Description property of the table based on the Feather file
-% description.
-t.Properties.Description = metadata.Description;
-
-end
diff --git a/matlab/src/featherreadmex.cc b/matlab/src/featherreadmex.cc
deleted file mode 100644
index b52b8a9..0000000
--- a/matlab/src/featherreadmex.cc
+++ /dev/null
@@ -1,37 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#include <string>
-
-#include <mex.h>
-
-#include "feather_reader.h"
-#include "util/handle_status.h"
-
-// MEX gateway function. This is the entry point for featherreadmex.cc.
-void mexFunction(int nlhs, mxArray* plhs[], int nrhs, const mxArray* prhs[]) {
-  const std::string filename{mxArrayToUTF8String(prhs[0])};
-
-  // Read the given Feather file into memory.
-  std::shared_ptr<arrow::matlab::FeatherReader> feather_reader{nullptr};
-  arrow::matlab::util::HandleStatus(
-      arrow::matlab::FeatherReader::Open(filename, &feather_reader));
-
-  // Return the Feather file table variables and table metadata to MATLAB.
-  plhs[0] = feather_reader->ReadVariables();
-  plhs[1] = feather_reader->ReadMetadata();
-}
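
Note that the gateway above assumes prhs[0] exists and is a character array. A hedged sketch of a more defensive variant is shown below; the error identifier is illustrative, not one the MATLAB interface actually defines.

    #include <mex.h>

    void mexFunction(int nlhs, mxArray* plhs[], int nrhs, const mxArray* prhs[]) {
      // Validate the argument count and type before touching prhs[0].
      if (nrhs != 1 || !mxIsChar(prhs[0])) {
        mexErrMsgIdAndTxt("MATLAB:arrow:InvalidArguments",
                          "Expected a single filename (character array) input.");
      }
      // ... proceed as in featherreadmex.cc above ...
    }
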
diff --git a/matlab/src/featherwrite.m b/matlab/src/featherwrite.m
deleted file mode 100644
index eeedf26..0000000
--- a/matlab/src/featherwrite.m
+++ /dev/null
@@ -1,44 +0,0 @@
-function featherwrite(filename, t)
-%FEATHERWRITE Write a table to a Feather file.
-%   Use the FEATHERWRITE function to write a table to
-%   a Feather file as column-oriented data.
-%
-%   FEATHERWRITE(FILENAME,T) writes the table T to a Feather
-%   file FILENAME as column-oriented data.
-
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-import mlarrow.util.table2mlarrow;
-
-% Validate input arguments.
-narginchk(2, 2);
-filename = convertStringsToChars(filename);
-if ~ischar(filename)
-    error('MATLAB:arrow:InvalidFilenameDatatype', ...
-        'Filename must be a character vector or string scalar.');
-end
-if ~istable(t)
-    error('MATLAB:arrow:InvalidInputTable', 't must be a table.');
-end
-
-[variables, metadata] = table2mlarrow(t);
-
-% Write the table to a Feather file.
-featherwritemex(filename, variables, metadata);
-
-end
diff --git a/matlab/src/featherwritemex.cc b/matlab/src/featherwritemex.cc
deleted file mode 100644
index 3a6815e..0000000
--- a/matlab/src/featherwritemex.cc
+++ /dev/null
@@ -1,37 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#include <string>
-
-#include <mex.h>
-
-#include "feather_writer.h"
-#include "util/handle_status.h"
-
-// MEX gateway function. This is the entry point for featherwritemex.cc.
-void mexFunction(int nlhs, mxArray* plhs[], int nrhs, const mxArray* prhs[]) {
-  const std::string filename{mxArrayToUTF8String(prhs[0])};
-
-  // Open a Feather file at the provided file path for writing.
-  std::shared_ptr<arrow::matlab::FeatherWriter> feather_writer{nullptr};
-  arrow::matlab::util::HandleStatus(
-      arrow::matlab::FeatherWriter::Open(filename, &feather_writer));
-
-  // Write the Feather file table variables and table metadata from MATLAB.
-  feather_writer->WriteMetadata(prhs[2]);
-  arrow::matlab::util::HandleStatus(feather_writer->WriteVariables(prhs[1]));
-}
diff --git a/matlab/src/matlab_traits.h b/matlab/src/matlab_traits.h
deleted file mode 100644
index a76539f..0000000
--- a/matlab/src/matlab_traits.h
+++ /dev/null
@@ -1,103 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#pragma once
-
-#include <arrow/type.h>
-
-#include <matrix.h>
-
-namespace arrow {
-namespace matlab {
-
-/// \brief A type traits class mapping Arrow types to MATLAB types.
-template <typename ArrowDataType>
-struct MatlabTraits;
-
-template <>
-struct MatlabTraits<FloatType> {
-  static constexpr mxClassID matlab_class_id = mxSINGLE_CLASS;
-  using MatlabType = mxSingle;
-  static MatlabType* GetData(mxArray* pa) { return mxGetSingles(pa); }
-};
-
-template <>
-struct MatlabTraits<DoubleType> {
-  static constexpr mxClassID matlab_class_id = mxDOUBLE_CLASS;
-  using MatlabType = mxDouble;
-  static MatlabType* GetData(mxArray* pa) { return mxGetDoubles(pa); }
-};
-
-template <>
-struct MatlabTraits<UInt8Type> {
-  static constexpr mxClassID matlab_class_id = mxUINT8_CLASS;
-  using MatlabType = mxUint8;
-  static MatlabType* GetData(mxArray* pa) { return mxGetUint8s(pa); }
-};
-
-template <>
-struct MatlabTraits<UInt16Type> {
-  static constexpr mxClassID matlab_class_id = mxUINT16_CLASS;
-  using MatlabType = mxUint16;
-  static MatlabType* GetData(mxArray* pa) { return mxGetUint16s(pa); }
-};
-
-template <>
-struct MatlabTraits<UInt32Type> {
-  static constexpr mxClassID matlab_class_id = mxUINT32_CLASS;
-  using MatlabType = mxUint32;
-  static MatlabType* GetData(mxArray* pa) { return mxGetUint32s(pa); }
-};
-
-template <>
-struct MatlabTraits<UInt64Type> {
-  static constexpr mxClassID matlab_class_id = mxUINT64_CLASS;
-  using MatlabType = mxUint64;
-  static MatlabType* GetData(mxArray* pa) { return mxGetUint64s(pa); }
-};
-
-template <>
-struct MatlabTraits<Int8Type> {
-  static constexpr mxClassID matlab_class_id = mxINT8_CLASS;
-  using MatlabType = mxInt8;
-  static MatlabType* GetData(mxArray* pa) { return mxGetInt8s(pa); }
-};
-
-template <>
-struct MatlabTraits<Int16Type> {
-  static constexpr mxClassID matlab_class_id = mxINT16_CLASS;
-  using MatlabType = mxInt16;
-  static MatlabType* GetData(mxArray* pa) { return mxGetInt16s(pa); }
-};
-
-template <>
-struct MatlabTraits<Int32Type> {
-  static constexpr mxClassID matlab_class_id = mxINT32_CLASS;
-  using MatlabType = mxInt32;
-  static MatlabType* GetData(mxArray* pa) { return mxGetInt32s(pa); }
-};
-
-template <>
-struct MatlabTraits<Int64Type> {
-  static constexpr mxClassID matlab_class_id = mxINT64_CLASS;
-  using MatlabType = mxInt64;
-  static MatlabType* GetData(mxArray* pa) { return mxGetInt64s(pa); }
-};
-
-}  // namespace matlab
-}  // namespace arrow
-
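
MatlabTraits above is consumed at compile time, e.g. via typename MatlabTraits<ArrowDataType>::MatlabType in WriteNumericData. Below is a standalone sketch of the same type-traits dispatch pattern; the tag and trait names are illustrative, and the example compiles as C++17.

    #include <cstdint>
    #include <iostream>

    struct Int32Tag {};
    struct DoubleTag {};

    // Map a tag type to its element type and a display name at compile time.
    template <typename Tag> struct Traits;

    template <> struct Traits<Int32Tag> {
      using CType = int32_t;
      static constexpr const char* name = "int32";
    };

    template <> struct Traits<DoubleTag> {
      using CType = double;
      static constexpr const char* name = "double";
    };

    template <typename Tag>
    void Describe() {
      std::cout << Traits<Tag>::name << " occupies "
                << sizeof(typename Traits<Tag>::CType) << " bytes\n";
    }

    int main() {
      Describe<Int32Tag>();   // prints "int32 occupies 4 bytes"
      Describe<DoubleTag>();  // prints "double occupies 8 bytes"
      return 0;
    }
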
diff --git a/matlab/src/util/handle_status.cc b/matlab/src/util/handle_status.cc
deleted file mode 100644
index f1c3b7f..0000000
--- a/matlab/src/util/handle_status.cc
+++ /dev/null
@@ -1,91 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#include <arrow/status.h>
-
-#include <mex.h>
-
-namespace arrow {
-namespace matlab {
-namespace util {
-
-void HandleStatus(const Status& status) {
-  const char* arrow_error_message = "Arrow error: %s";
-  switch (status.code()) {
-    case StatusCode::OK: {
-      break;
-    }
-    case StatusCode::OutOfMemory: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:OutOfMemory", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::KeyError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:KeyError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::TypeError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:TypeError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::Invalid: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:Invalid", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::IOError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:IOError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::CapacityError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:CapacityError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::IndexError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:IndexError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::UnknownError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:UnknownError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::NotImplemented: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:NotImplemented", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    case StatusCode::SerializationError: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:SerializationError", arrow_error_message,
-                        status.ToString().c_str());
-      break;
-    }
-    default: {
-      mexErrMsgIdAndTxt("MATLAB:arrow:status:UnknownStatus", arrow_error_message,
-                        "Unknown status");
-      break;
-    }
-  }
-}
-}  // namespace util
-}  // namespace matlab
-}  // namespace arrow
diff --git a/matlab/src/util/handle_status.h b/matlab/src/util/handle_status.h
deleted file mode 100644
index 7212114..0000000
--- a/matlab/src/util/handle_status.h
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#pragma once
-
-#include <arrow/status.h>
-
-namespace arrow {
-namespace matlab {
-namespace util {
-// Terminates execution and returns to the MATLAB prompt,
-// displaying an error message if the given status
-// indicates that an error has occurred.
-void HandleStatus(const Status& status);
-}  // namespace util
-}  // namespace matlab
-}  // namespace arrow
-
diff --git a/matlab/src/util/unicode_conversion.cc b/matlab/src/util/unicode_conversion.cc
deleted file mode 100644
index 01c2e4b..0000000
--- a/matlab/src/util/unicode_conversion.cc
+++ /dev/null
@@ -1,63 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#include <locale> /* for std::wstring_convert */
-#include <codecvt> /* for std::codecvt_utf8_utf16 */
-
-#include "unicode_conversion.h"
-
-namespace arrow {
-namespace matlab {
-namespace util {
-
-mxArray* ConvertUTF8StringToUTF16CharMatrix(const std::string& utf8_string) {
-  // Get pointers to the start and end of the std::string data.
-  const char* string_start = utf8_string.c_str();
-  const char* string_end = string_start + utf8_string.length();
-
-  // Due to this issue on MSVC: https://stackoverflow.com/q/32055357 we cannot 
-  // directly use a destination type of char16_t.
-#if _MSC_VER >= 1900
-  using CharType = int16_t;
-#else
-  using CharType = char16_t;
-#endif
-  using ConverterType = std::codecvt_utf8_utf16<CharType>;
-  std::wstring_convert<ConverterType, CharType> code_converter{};
-
-  std::basic_string<CharType> utf16_string;
-  try {
-    utf16_string = code_converter.from_bytes(string_start, string_end);
-  } catch (...) {
-    // In the case that any error occurs, just try returning a string in the 
-    // user's current locale instead.
-    return mxCreateString(string_start);
-  }
-
-  // Store the converted UTF-16 string in an mxCharMatrix and return it.
-  const mwSize dimensions[2] = {1, utf16_string.size()};
-  mxArray* character_matrix = mxCreateCharArray(2, dimensions);
-  mxChar* character_matrix_pointer = mxGetChars(character_matrix);
-  std::copy(utf16_string.data(), utf16_string.data() + utf16_string.size(), 
-      character_matrix_pointer);
-
-  return character_matrix;
-}
-
-}  // namespace util
-}  // namespace matlab
-}  // namespace arrow
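
The conversion above can be exercised outside of MEX. A runnable sketch using the same std::wstring_convert / std::codecvt_utf8_utf16 combination follows, using char16_t directly (i.e. on a non-MSVC toolchain, per the workaround noted above); these facilities are deprecated as of C++17 but still function.

    #include <codecvt>
    #include <iostream>
    #include <locale>
    #include <string>

    int main() {
      std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> converter;
      // "caf\xC3\xA9" is the UTF-8 encoding of "café": 5 bytes, 4 code points.
      std::u16string utf16 = converter.from_bytes("caf\xC3\xA9");
      std::cout << utf16.size() << " UTF-16 code units\n";  // prints "4 UTF-16 code units"
      return 0;
    }
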
diff --git a/matlab/src/util/unicode_conversion.h b/matlab/src/util/unicode_conversion.h
deleted file mode 100644
index fa905cb..0000000
--- a/matlab/src/util/unicode_conversion.h
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-#pragma once
-
-#include <string>
-#include <mex.h>
-
-namespace arrow {
-namespace matlab {
-namespace util {
-// Converts a UTF-8 encoded std::string to a heap-allocated UTF-16 encoded
-// mxCharArray.
-mxArray* ConvertUTF8StringToUTF16CharMatrix(const std::string& utf8_string);
-}  // namespace util
-}  // namespace matlab
-}  // namespace arrow
-
diff --git a/matlab/test/tfeather.m b/matlab/test/tfeather.m
deleted file mode 100755
index 625a3a5..0000000
--- a/matlab/test/tfeather.m
+++ /dev/null
@@ -1,232 +0,0 @@
-classdef tfeather < matlab.unittest.TestCase
-    % Tests for MATLAB featherread and featherwrite.
-
-    % Licensed to the Apache Software Foundation (ASF) under one or more
-    % contributor license agreements.  See the NOTICE file distributed with
-    % this work for additional information regarding copyright ownership.
-    % The ASF licenses this file to you under the Apache License, Version
-    % 2.0 (the "License"); you may not use this file except in compliance
-    % with the License.  You may obtain a copy of the License at
-    %
-    %   http://www.apache.org/licenses/LICENSE-2.0
-    %
-    % Unless required by applicable law or agreed to in writing, software
-    % distributed under the License is distributed on an "AS IS" BASIS,
-    % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-    % implied.  See the License for the specific language governing
-    % permissions and limitations under the License.
-    
-    methods(TestClassSetup)
-        
-        function addFeatherFunctionsToMATLABPath(testCase)
-            import matlab.unittest.fixtures.PathFixture
-            % Add Feather test utilities to the MATLAB path.
-            testCase.applyFixture(PathFixture('util'));
-            % Add featherread and featherwrite to the MATLAB path.
-            testCase.applyFixture(PathFixture(fullfile('..', 'src')));
-            % featherreadmex must be on the MATLAB path.
-            testCase.assertTrue(~isempty(which('featherreadmex')), ...
-                '''featherreadmex'' must be on the MATLAB path. Use ''addpath'' to add folders to the MATLAB path.');
-            % featherwritemex must be on the MATLAB path.
-            testCase.assertTrue(~isempty(which('featherwritemex')), ...
-                '''featherwritemex'' must be on the MATLAB path. Use ''addpath'' to add folders to the MATLAB path.');
-        end
-        
-    end
-    
-    methods(TestMethodSetup)
-    
-        function setupTempWorkingDirectory(testCase)
-            import matlab.unittest.fixtures.WorkingFolderFixture;
-            testCase.applyFixture(WorkingFolderFixture);
-        end
-        
-    end
-    
-    methods(Test)
-
-        function NumericDatatypesNoNulls(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            actualTable = createTable;
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-
-        function NumericDatatypesWithNaNRow(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            t = createTable;
-            
-            variableNames = {'single', ...
-                             'double', ...
-                             'int8', ...
-                             'int16', ...
-                             'int32', ...
-                             'int64', ...
-                             'uint8', ...
-                             'uint16', ...
-                             'uint32', ...
-                             'uint64'};
-            variableTypes = repmat({'double'}, 10, 1)';
-            numRows = 1;
-            numVariables = 10;
-            
-            addRow = table('Size', [numRows, numVariables], ...
-                           'VariableTypes', variableTypes, ...
-                           'VariableNames', variableNames);
-            addRow(1,:) = {NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN};
-            actualTable = [t; addRow];
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-
-        function NumericDatatypesWithNaNColumns(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            actualTable = createTable;
-            actualTable.double = [NaN; NaN; NaN];
-            actualTable.int64  = [NaN; NaN; NaN];
-            
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-        
-        function NumericDatatypesWithExpInfSciNotation(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            actualTable = createTable;
-            actualTable.single(2) = 1.0418e+06;
-            
-            actualTable.double(1) = Inf;
-            actualTable.double(2) = exp(9);
-            
-            actualTable.int64(2) = 1.0418e+03;
-           
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-        
-        function IgnoreRowVarNames(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            actualTable = createTable;
-            time = {'day1', 'day2', 'day3'};
-            actualTable.Properties.RowNames = time;
-            expectedTable = featherRoundTrip(filename, actualTable);
-            actualTable = createTable;
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-
-        function NotFeatherExtension(testCase)
-            filename = fullfile(pwd, 'temp.txt');
-            
-            actualTable = createTable;
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-        
-        function EmptyTable(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            actualTable = table;
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-
-        function zeroByNTable(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            actualTable = createTable;
-            actualTable([1, 2], :) = [];
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyEqual(actualTable, expectedTable);
-        end
-
-        % %%%%%%%%%%%%%%%%%%%
-        % Negative test cases
-        % %%%%%%%%%%%%%%%%%%%
-
-        function ErrorIfUnableToOpenFile(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-
-            testCase.verifyError(@() featherread(filename), 'MATLAB:arrow:UnableToOpenFile');
-        end
-
-        function ErrorIfCorruptedFeatherFile(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            t = createTable;
-            featherwrite(filename, t);
-            
-            fileID = fopen(filename, 'w');
-            fwrite(fileID, [1; 5]);
-            fclose(fileID);
-            
-            testCase.verifyError(@() featherread(filename), 'MATLAB:arrow:status:Invalid');
-        end
-        
-        function ErrorIfInvalidFilenameDatatype(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            t = createTable;
-            
-            testCase.verifyError(@() featherwrite({filename}, t), 'MATLAB:arrow:InvalidFilenameDatatype');
-            testCase.verifyError(@() featherread({filename}), 'MATLAB:arrow:InvalidFilenameDatatype');
-        end
-
-        function ErrorIfTooManyInputs(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            t = createTable;
-
-            testCase.verifyError(@() featherwrite(filename, t, 'SomeValue', 'SomeOtherValue'), 'MATLAB:TooManyInputs');
-            testCase.verifyError(@() featherread(filename, 'SomeValue', 'SomeOtherValue'), 'MATLAB:TooManyInputs');
-        end
-
-        function ErrorIfTooFewInputs(testCase)
-            testCase.verifyError(@() featherwrite(), 'MATLAB:narginchk:notEnoughInputs');
-            testCase.verifyError(@() featherread(), 'MATLAB:narginchk:notEnoughInputs');
-        end
-        
-        function ErrorIfMultiColVarExist(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            age           = [38; 43; 38; 40; 49];
-            smoker        = logical([1; 0; 1; 0; 1]);
-            height        = [71; 69; 64; 67; 64];
-            weight        = [176; 163; 131; 133; 119];
-            bloodPressure = [124, 93; 109, 77; 125, 83; 117, 75; 122, 80];
-            
-            t = table(age, smoker, height, weight, bloodPressure);
-            
-            testCase.verifyError(@() featherwrite(filename, t), 'MATLAB:arrow:UnsupportedVariableType');
-        end
-        
-        function UnsupportedMATLABDatatypes(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-
-            actualTable = createTable;
-            calendarDurationVariable = [calendarDuration(1, 7, 9); ...
-                                        calendarDuration(2, 1, 1); ...
-                                        calendarDuration(5, 3, 2)];
-            actualTable = addvars(actualTable, calendarDurationVariable);
-
-            testCase.verifyError(@() featherwrite(filename, actualTable), 'MATLAB:arrow:UnsupportedVariableType');
-        end
-        
-        function NumericComplexUnsupported(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-
-            actualTable = createTable;
-            actualTable.single(1) = 1.0418 + 2i;
-            actualTable.double(2) = exp(9) + 5i;
-            actualTable.int64(2) = 1.0418e+03;
-           
-            expectedTable = featherRoundTrip(filename, actualTable);
-            testCase.verifyNotEqual(actualTable, expectedTable);
-        end
-        
-    end
-    
-end
diff --git a/matlab/test/tfeathermex.m b/matlab/test/tfeathermex.m
deleted file mode 100644
index fa79b4b..0000000
--- a/matlab/test/tfeathermex.m
+++ /dev/null
@@ -1,76 +0,0 @@
-classdef tfeathermex < matlab.unittest.TestCase
-    % Tests for MATLAB featherreadmex and featherwritemex.
-    
-    % Licensed to the Apache Software Foundation (ASF) under one or more
-    % contributor license agreements.  See the NOTICE file distributed with
-    % this work for additional information regarding copyright ownership.
-    % The ASF licenses this file to you under the Apache License, Version
-    % 2.0 (the "License"); you may not use this file except in compliance
-    % with the License.  You may obtain a copy of the License at
-    %
-    %   http://www.apache.org/licenses/LICENSE-2.0
-    %
-    % Unless required by applicable law or agreed to in writing, software
-    % distributed under the License is distributed on an "AS IS" BASIS,
-    % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-    % implied.  See the License for the specific language governing
-    % permissions and limitations under the License.
-    
-    methods(TestClassSetup)
-        
-        function addFeatherFunctionsToMATLABPath(testCase)
-            import matlab.unittest.fixtures.PathFixture
-            % Add Feather test utilities to the MATLAB path.
-            testCase.applyFixture(PathFixture('util'));
-            % Add featherread and featherwrite to the MATLAB path.
-            testCase.applyFixture(PathFixture(fullfile('..', 'src')));
-            % featherreadmex must be on the MATLAB path.
-            testCase.assertTrue(~isempty(which('featherreadmex')), ...
-                '''featherreadmex'' must be on the MATLAB path. Use ''addpath'' to add folders to the MATLAB path.');
-            % featherwritemex must be on the MATLAB path.
-            testCase.assertTrue(~isempty(which('featherwritemex')), ...
-                '''featherwritemex'' must be on the MATLAB path. Use ''addpath'' to add folders to the MATLAB path.');
-        end
-        
-    end
-    
-    methods(TestMethodSetup)
-    
-        function setupTempWorkingDirectory(testCase)
-            import matlab.unittest.fixtures.WorkingFolderFixture;
-            testCase.applyFixture(WorkingFolderFixture);
-        end
-        
-    end
-    
-    methods(Test)
-        
-        function NumericDatatypesNulls(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            [expectedVariables, expectedMetadata] = createVariablesAndMetadataStructs();
-            [actualVariables, ~] = featherMEXRoundTrip(filename, expectedVariables, expectedMetadata);
-            testCase.verifyEqual([actualVariables.Valid], [expectedVariables.Valid]);
-        end
-        
-        function InvalidMATLABTableVariableNames(testCase)
-            filename = fullfile(pwd, 'temp.feather');
-            
-            % Create a table with an invalid MATLAB table variable name.
-            invalidVariable = mlarrow.util.createVariableStruct('double', 1, true, '@');
-            validVariable = mlarrow.util.createVariableStruct('double', 1, true, 'Valid');
-            variables = [invalidVariable, validVariable];
-            metadata = mlarrow.util.createMetadataStruct('', 1, 2);
-            featherwritemex(filename, variables, metadata);
-            t = featherread(filename);
-            
-            testCase.verifyEqual(t.Properties.VariableNames{1}, 'x_');
-            testCase.verifyEqual(t.Properties.VariableNames{2}, 'Valid');
-            
-            testCase.verifyEqual(t.Properties.VariableDescriptions{1}, 'Original variable name: ''@''');
-            testCase.verifyEqual(t.Properties.VariableDescriptions{2}, '');
-        end
-        
-    end
-
-end
diff --git a/matlab/test/util/createTable.m b/matlab/test/util/createTable.m
deleted file mode 100644
index 2bf67c6..0000000
--- a/matlab/test/util/createTable.m
+++ /dev/null
@@ -1,68 +0,0 @@
-function t = createTable()
-% CREATETABLE Helper function for creating test table.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-variableNames = {'uint8', ...
-                 'uint16', ...
-                 'uint32', ...
-                 'uint64', ...
-                 'int8', ...
-                 'int16', ...
-                 'int32', ...
-                 'int64', ...
-                 'single', ...
-                 'double'};
-
-variableTypes = {'uint8', ...
-                 'uint16', ...
-                 'uint32', ...
-                 'uint64', ...
-                 'int8', ...
-                 'int16', ...
-                 'int32', ...
-                 'int64', ...
-                 'single', ...
-                 'double'};
-
-uint8Data  = uint8([1; 2; 3]);
-uint16Data = uint16([1; 2; 3]);
-uint32Data = uint32([1; 2; 3]);
-uint64Data = uint64([1; 2; 3]);
-int8Data   = int8([1; 2; 3]);
-int16Data  = int16([1; 2; 3]);
-int32Data  = int32([1; 2; 3]);
-int64Data  = int64([1; 2; 3]);
-singleData = single([1/2; 1/4; 1/8]);
-doubleData = double([1/10; 1/100; 1/1000]);
-
-numRows = 3;
-numVariables = 10;
-
-t = table('Size', [numRows, numVariables], 'VariableTypes', variableTypes, 'VariableNames', variableNames);
-
-t.uint8  = uint8Data;
-t.uint16 = uint16Data;
-t.uint32 = uint32Data;
-t.uint64 = uint64Data;
-t.int8   = int8Data;
-t.int16  = int16Data;
-t.int32  = int32Data;
-t.int64  = int64Data;
-t.single = singleData;
-t.double = doubleData;
-
-end
\ No newline at end of file
diff --git a/matlab/test/util/createVariablesAndMetadataStructs.m b/matlab/test/util/createVariablesAndMetadataStructs.m
deleted file mode 100644
index 01a8f58..0000000
--- a/matlab/test/util/createVariablesAndMetadataStructs.m
+++ /dev/null
@@ -1,98 +0,0 @@
-function [variables, metadata] = createVariablesAndMetadataStructs()
-% CREATEVARIABLESANDMETADATASTRUCTS Helper function for creating
-% Feather MEX variables and metadata structs.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-import mlarrow.util.*;
-
-type = 'uint8';
-data = uint8([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'uint8';
-uint8Variable = createVariableStruct(type, data, valid, name);
-
-type = 'uint16';
-data = uint16([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'uint16';
-uint16Variable = createVariableStruct(type, data, valid, name);
-
-type = 'uint32';
-data = uint32([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'uint32';
-uint32Variable = createVariableStruct(type, data, valid, name);
-
-type = 'uint64';
-data = uint64([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'uint64';
-uint64Variable = createVariableStruct(type, data, valid, name);
-
-type = 'int8';
-data = int8([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'int8';
-int8Variable = createVariableStruct(type, data, valid, name);
-
-type = 'int16';
-data = int16([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'int16';
-int16Variable = createVariableStruct(type, data, valid, name);
-
-type = 'int32';
-data = int32([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'int32';
-int32Variable = createVariableStruct(type, data, valid, name);
-
-type = 'int64';
-data = int64([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'int64';
-int64Variable = createVariableStruct(type, data, valid, name);
-
-type = 'single';
-data = single([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'single';
-singleVariable = createVariableStruct(type, data, valid, name);
-
-type = 'double';
-data = double([1; 2; 3]);
-valid = logical([0; 1; 0]);
-name = 'double';
-doubleVariable = createVariableStruct(type, data, valid, name);
-
-variables = [uint8Variable, ...
-             uint16Variable, ...
-             uint32Variable, ...
-             uint64Variable, ...
-             int8Variable, ...
-             int16Variable, ...
-             int32Variable, ...
-             int64Variable, ...
-             singleVariable, ...
-             doubleVariable];
-
-description = 'test';
-numRows = 3;
-numVariables = length(variables);
-
-metadata = createMetadataStruct(description, numRows, numVariables);
-end
diff --git a/matlab/test/util/featherMEXRoundTrip.m b/matlab/test/util/featherMEXRoundTrip.m
deleted file mode 100644
index 49ab183..0000000
--- a/matlab/test/util/featherMEXRoundTrip.m
+++ /dev/null
@@ -1,22 +0,0 @@
-function [variablesOut, metadataOut] = featherMEXRoundTrip(filename, variablesIn, metadataIn)
-% FEATHERMEXROUNDTRIP Helper function for round tripping variables
-% and metadata structs to a Feather file.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-featherwritemex(filename, variablesIn, metadataIn);
-[variablesOut, metadataOut] = featherreadmex(filename);
-end
\ No newline at end of file
diff --git a/matlab/test/util/featherRoundTrip.m b/matlab/test/util/featherRoundTrip.m
deleted file mode 100644
index 18f8056..0000000
--- a/matlab/test/util/featherRoundTrip.m
+++ /dev/null
@@ -1,22 +0,0 @@
-function tableOut = featherRoundTrip(filename, tableIn)
-% FEATHERROUNDTRIP Helper function for round tripping a table
-% to a Feather file.
-
-% Licensed to the Apache Software Foundation (ASF) under one or more
-% contributor license agreements.  See the NOTICE file distributed with
-% this work for additional information regarding copyright ownership.
-% The ASF licenses this file to you under the Apache License, Version
-% 2.0 (the "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS,
-% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-% implied.  See the License for the specific language governing
-% permissions and limitations under the License.
-
-featherwrite(filename, tableIn);
-tableOut = featherread(filename);
-end
\ No newline at end of file

[arrow-rs] 01/14: Removed Ruby.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit ff4212c9a510d0493cb6609e3d8bc324b83a255a
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:19:08 2021 +0000

    Removed Ruby.
---
 ruby/Gemfile                                       |  22 -
 ruby/README.md                                     |  36 -
 ruby/Rakefile                                      |  56 --
 ruby/red-arrow-cuda/.gitignore                     |  18 -
 ruby/red-arrow-cuda/Gemfile                        |  24 -
 ruby/red-arrow-cuda/LICENSE.txt                    | 202 ------
 ruby/red-arrow-cuda/NOTICE.txt                     |   2 -
 ruby/red-arrow-cuda/README.md                      |  60 --
 ruby/red-arrow-cuda/Rakefile                       |  41 --
 ruby/red-arrow-cuda/dependency-check/Rakefile      |  47 --
 ruby/red-arrow-cuda/lib/arrow-cuda.rb              |  29 -
 .../lib/arrow-cuda/device-manager.rb               |  25 -
 ruby/red-arrow-cuda/lib/arrow-cuda/loader.rb       |  35 -
 ruby/red-arrow-cuda/lib/arrow-cuda/version.rb      |  26 -
 ruby/red-arrow-cuda/red-arrow-cuda.gemspec         |  51 --
 ruby/red-arrow-cuda/test/helper.rb                 |  20 -
 ruby/red-arrow-cuda/test/run-test.rb               |  50 --
 ruby/red-arrow-cuda/test/test-cuda.rb              |  38 -
 ruby/red-arrow-dataset/.gitignore                  |  18 -
 ruby/red-arrow-dataset/Gemfile                     |  24 -
 ruby/red-arrow-dataset/LICENSE.txt                 | 202 ------
 ruby/red-arrow-dataset/NOTICE.txt                  |   2 -
 ruby/red-arrow-dataset/README.md                   |  50 --
 ruby/red-arrow-dataset/Rakefile                    |  41 --
 ruby/red-arrow-dataset/dependency-check/Rakefile   |  47 --
 ruby/red-arrow-dataset/lib/arrow-dataset.rb        |  29 -
 .../lib/arrow-dataset/in-memory-fragment.rb        |  32 -
 .../lib/arrow-dataset/in-memory-scan-task.rb       |  35 -
 ruby/red-arrow-dataset/lib/arrow-dataset/loader.rb |  36 -
 .../lib/arrow-dataset/scan-options.rb              |  37 -
 .../red-arrow-dataset/lib/arrow-dataset/version.rb |  26 -
 ruby/red-arrow-dataset/red-arrow-dataset.gemspec   |  51 --
 ruby/red-arrow-dataset/test/helper.rb              |  20 -
 ruby/red-arrow-dataset/test/run-test.rb            |  50 --
 .../test/test-in-memory-scan-task.rb               |  33 -
 ruby/red-arrow-dataset/test/test-scan-options.rb   |  36 -
 ruby/red-arrow/.gitignore                          |  22 -
 ruby/red-arrow/.yardopts                           |   6 -
 ruby/red-arrow/Gemfile                             |  22 -
 ruby/red-arrow/LICENSE.txt                         | 202 ------
 ruby/red-arrow/NOTICE.txt                          |   2 -
 ruby/red-arrow/README.md                           |  52 --
 ruby/red-arrow/Rakefile                            | 100 ---
 ruby/red-arrow/benchmark/raw-records/boolean.yml   |  65 --
 .../red-arrow/benchmark/raw-records/decimal128.yml |  68 --
 .../red-arrow/benchmark/raw-records/dictionary.yml |  75 --
 ruby/red-arrow/benchmark/raw-records/int64.yml     |  67 --
 ruby/red-arrow/benchmark/raw-records/list.yml      |  70 --
 ruby/red-arrow/benchmark/raw-records/string.yml    |  65 --
 ruby/red-arrow/benchmark/raw-records/timestamp.yml |  75 --
 ruby/red-arrow/benchmark/values/boolean.yml        |  37 -
 ruby/red-arrow/benchmark/values/decimal128.yml     |  38 -
 ruby/red-arrow/benchmark/values/dictionary.yml     |  46 --
 ruby/red-arrow/benchmark/values/int64.yml          |  37 -
 ruby/red-arrow/benchmark/values/list.yml           |  44 --
 ruby/red-arrow/benchmark/values/string.yml         |  38 -
 ruby/red-arrow/benchmark/values/timestamp.yml      |  49 --
 ruby/red-arrow/doc/text/development.md             |  34 -
 ruby/red-arrow/example/read-file.rb                |  36 -
 ruby/red-arrow/example/read-stream.rb              |  36 -
 ruby/red-arrow/example/write-file.rb               |  63 --
 ruby/red-arrow/example/write-stream.rb             |  63 --
 ruby/red-arrow/ext/arrow/arrow.cpp                 |  81 ---
 ruby/red-arrow/ext/arrow/converters.cpp            |  42 --
 ruby/red-arrow/ext/arrow/converters.hpp            | 669 -----------------
 ruby/red-arrow/ext/arrow/extconf.rb                |  63 --
 ruby/red-arrow/ext/arrow/raw-records.cpp           | 183 -----
 ruby/red-arrow/ext/arrow/red-arrow.hpp             |  95 ---
 ruby/red-arrow/ext/arrow/values.cpp                | 156 ----
 ruby/red-arrow/image/red-arrow.png                 | Bin 7165 -> 0 bytes
 ruby/red-arrow/lib/arrow.rb                        |  30 -
 ruby/red-arrow/lib/arrow/array-builder.rb          | 209 ------
 ruby/red-arrow/lib/arrow/array.rb                  | 222 ------
 ruby/red-arrow/lib/arrow/bigdecimal-extension.rb   |  28 -
 ruby/red-arrow/lib/arrow/block-closable.rb         |  35 -
 ruby/red-arrow/lib/arrow/buffer.rb                 |  28 -
 ruby/red-arrow/lib/arrow/chunked-array.rb          |  91 ---
 ruby/red-arrow/lib/arrow/column-containable.rb     |  48 --
 ruby/red-arrow/lib/arrow/column.rb                 |  76 --
 ruby/red-arrow/lib/arrow/compression-type.rb       |  37 -
 ruby/red-arrow/lib/arrow/csv-loader.rb             | 384 ----------
 ruby/red-arrow/lib/arrow/csv-read-options.rb       |  43 --
 ruby/red-arrow/lib/arrow/data-type.rb              | 198 ------
 ruby/red-arrow/lib/arrow/date32-array-builder.rb   |  32 -
 ruby/red-arrow/lib/arrow/date32-array.rb           |  30 -
 ruby/red-arrow/lib/arrow/date64-array-builder.rb   |  33 -
 ruby/red-arrow/lib/arrow/date64-array.rb           |  29 -
 .../lib/arrow/decimal128-array-builder.rb          |  58 --
 ruby/red-arrow/lib/arrow/decimal128-array.rb       |  24 -
 ruby/red-arrow/lib/arrow/decimal128-data-type.rb   |  71 --
 ruby/red-arrow/lib/arrow/decimal128.rb             |  60 --
 .../lib/arrow/decimal256-array-builder.rb          |  61 --
 ruby/red-arrow/lib/arrow/decimal256-array.rb       |  25 -
 ruby/red-arrow/lib/arrow/decimal256-data-type.rb   |  73 --
 ruby/red-arrow/lib/arrow/decimal256.rb             |  60 --
 ruby/red-arrow/lib/arrow/dense-union-data-type.rb  |  90 ---
 ruby/red-arrow/lib/arrow/dictionary-array.rb       |  24 -
 ruby/red-arrow/lib/arrow/dictionary-data-type.rb   | 117 ---
 ruby/red-arrow/lib/arrow/field-containable.rb      |  38 -
 ruby/red-arrow/lib/arrow/field.rb                  | 118 ---
 ruby/red-arrow/lib/arrow/file-output-stream.rb     |  34 -
 .../lib/arrow/fixed-size-binary-array-builder.rb   |  38 -
 .../red-arrow/lib/arrow/fixed-size-binary-array.rb |  26 -
 ruby/red-arrow/lib/arrow/generic-filterable.rb     |  43 --
 ruby/red-arrow/lib/arrow/generic-takeable.rb       |  38 -
 ruby/red-arrow/lib/arrow/group.rb                  | 172 -----
 ruby/red-arrow/lib/arrow/list-array-builder.rb     |  96 ---
 ruby/red-arrow/lib/arrow/list-data-type.rb         | 118 ---
 ruby/red-arrow/lib/arrow/loader.rb                 | 172 -----
 ruby/red-arrow/lib/arrow/null-array-builder.rb     |  26 -
 ruby/red-arrow/lib/arrow/null-array.rb             |  24 -
 ruby/red-arrow/lib/arrow/path-extension.rb         |  45 --
 ruby/red-arrow/lib/arrow/raw-table-converter.rb    |  47 --
 ruby/red-arrow/lib/arrow/record-batch-builder.rb   | 114 ---
 .../lib/arrow/record-batch-file-reader.rb          |  28 -
 ruby/red-arrow/lib/arrow/record-batch-iterator.rb  |  22 -
 .../lib/arrow/record-batch-stream-reader.rb        |  30 -
 ruby/red-arrow/lib/arrow/record-batch.rb           |  77 --
 ruby/red-arrow/lib/arrow/record-containable.rb     |  38 -
 ruby/red-arrow/lib/arrow/record.rb                 |  60 --
 ruby/red-arrow/lib/arrow/rolling-window.rb         |  48 --
 ruby/red-arrow/lib/arrow/schema.rb                 | 100 ---
 ruby/red-arrow/lib/arrow/slicer.rb                 | 454 ------------
 ruby/red-arrow/lib/arrow/sort-key.rb               | 193 -----
 ruby/red-arrow/lib/arrow/sort-options.rb           | 109 ---
 ruby/red-arrow/lib/arrow/sparse-union-data-type.rb |  90 ---
 ruby/red-arrow/lib/arrow/struct-array-builder.rb   | 146 ----
 ruby/red-arrow/lib/arrow/struct-array.rb           |  68 --
 ruby/red-arrow/lib/arrow/struct-data-type.rb       | 128 ----
 ruby/red-arrow/lib/arrow/table-formatter.rb        |  66 --
 ruby/red-arrow/lib/arrow/table-list-formatter.rb   |  39 -
 ruby/red-arrow/lib/arrow/table-loader.rb           | 187 -----
 ruby/red-arrow/lib/arrow/table-saver.rb            | 169 -----
 ruby/red-arrow/lib/arrow/table-table-formatter.rb  |  73 --
 ruby/red-arrow/lib/arrow/table.rb                  | 525 --------------
 ruby/red-arrow/lib/arrow/tensor.rb                 |  24 -
 ruby/red-arrow/lib/arrow/time.rb                   | 159 -----
 ruby/red-arrow/lib/arrow/time32-array-builder.rb   |  49 --
 ruby/red-arrow/lib/arrow/time32-array.rb           |  28 -
 ruby/red-arrow/lib/arrow/time32-data-type.rb       |  61 --
 ruby/red-arrow/lib/arrow/time64-array-builder.rb   |  49 --
 ruby/red-arrow/lib/arrow/time64-array.rb           |  28 -
 ruby/red-arrow/lib/arrow/time64-data-type.rb       |  61 --
 .../red-arrow/lib/arrow/timestamp-array-builder.rb |  65 --
 ruby/red-arrow/lib/arrow/timestamp-array.rb        |  42 --
 ruby/red-arrow/lib/arrow/timestamp-data-type.rb    |  57 --
 ruby/red-arrow/lib/arrow/version.rb                |  26 -
 ruby/red-arrow/lib/arrow/writable.rb               |  22 -
 ruby/red-arrow/red-arrow.gemspec                   |  66 --
 ruby/red-arrow/test/fixture/TestOrcFile.test1.orc  | Bin 1711 -> 0 bytes
 ruby/red-arrow/test/fixture/float-integer.csv      |  20 -
 ruby/red-arrow/test/fixture/integer-float.csv      |  20 -
 .../test/fixture/null-with-double-quote.csv        |  20 -
 .../test/fixture/null-without-double-quote.csv     |  20 -
 ruby/red-arrow/test/fixture/with-header-float.csv  |  20 -
 ruby/red-arrow/test/fixture/with-header.csv        |  20 -
 .../test/fixture/without-header-float.csv          |  19 -
 ruby/red-arrow/test/fixture/without-header.csv     |  19 -
 ruby/red-arrow/test/helper.rb                      |  27 -
 ruby/red-arrow/test/helper/fixture.rb              |  28 -
 ruby/red-arrow/test/helper/omittable.rb            |  36 -
 .../test/raw-records/test-basic-arrays.rb          | 365 ----------
 .../test/raw-records/test-dense-union-array.rb     | 480 -------------
 ruby/red-arrow/test/raw-records/test-list-array.rb | 552 ---------------
 .../test/raw-records/test-multiple-columns.rb      |  65 --
 .../test/raw-records/test-sparse-union-array.rb    | 470 ------------
 .../test/raw-records/test-struct-array.rb          | 470 ------------
 ruby/red-arrow/test/raw-records/test-table.rb      |  47 --
 ruby/red-arrow/test/run-test.rb                    |  71 --
 ruby/red-arrow/test/test-array-builder.rb          | 129 ----
 ruby/red-arrow/test/test-array.rb                  | 291 --------
 ruby/red-arrow/test/test-bigdecimal.rb             |  40 --
 ruby/red-arrow/test/test-buffer.rb                 |  49 --
 ruby/red-arrow/test/test-chunked-array.rb          | 183 -----
 ruby/red-arrow/test/test-column.rb                 |  92 ---
 ruby/red-arrow/test/test-csv-loader.rb             | 250 -------
 ruby/red-arrow/test/test-data-type.rb              |  83 ---
 ruby/red-arrow/test/test-date32-array.rb           |  24 -
 ruby/red-arrow/test/test-date64-array.rb           |  25 -
 .../test/test-decimal128-array-builder.rb          | 112 ---
 ruby/red-arrow/test/test-decimal128-array.rb       |  38 -
 ruby/red-arrow/test/test-decimal128-data-type.rb   |  31 -
 ruby/red-arrow/test/test-decimal128.rb             | 102 ---
 .../test/test-decimal256-array-builder.rb          | 112 ---
 ruby/red-arrow/test/test-decimal256-array.rb       |  38 -
 ruby/red-arrow/test/test-decimal256-data-type.rb   |  31 -
 ruby/red-arrow/test/test-decimal256.rb             | 102 ---
 ruby/red-arrow/test/test-dense-union-data-type.rb  |  41 --
 ruby/red-arrow/test/test-dictionary-array.rb       |  41 --
 ruby/red-arrow/test/test-dictionary-data-type.rb   |  40 --
 ruby/red-arrow/test/test-feather.rb                |  49 --
 ruby/red-arrow/test/test-field.rb                  |  91 ---
 ruby/red-arrow/test/test-file-output-stream.rb     |  54 --
 .../test/test-fixed-size-binary-array-builder.rb   |  92 ---
 .../red-arrow/test/test-fixed-size-binary-array.rb |  36 -
 ruby/red-arrow/test/test-group.rb                  | 156 ----
 ruby/red-arrow/test/test-list-array-builder.rb     |  79 ---
 ruby/red-arrow/test/test-list-array.rb             |  32 -
 ruby/red-arrow/test/test-list-data-type.rb         |  69 --
 ruby/red-arrow/test/test-null-array.rb             |  23 -
 ruby/red-arrow/test/test-orc.rb                    | 177 -----
 ruby/red-arrow/test/test-record-batch-builder.rb   | 125 ----
 .../test/test-record-batch-file-reader.rb          | 115 ---
 ruby/red-arrow/test/test-record-batch-iterator.rb  |  37 -
 ruby/red-arrow/test/test-record-batch.rb           | 140 ----
 ruby/red-arrow/test/test-rolling-window.rb         |  40 --
 ruby/red-arrow/test/test-schema.rb                 | 134 ----
 ruby/red-arrow/test/test-slicer.rb                 | 488 -------------
 ruby/red-arrow/test/test-sort-indices.rb           |  40 --
 ruby/red-arrow/test/test-sort-key.rb               |  81 ---
 ruby/red-arrow/test/test-sort-options.rb           |  58 --
 ruby/red-arrow/test/test-sparse-union-data-type.rb |  41 --
 ruby/red-arrow/test/test-struct-array-builder.rb   | 184 -----
 ruby/red-arrow/test/test-struct-array.rb           |  94 ---
 ruby/red-arrow/test/test-struct-data-type.rb       | 112 ---
 ruby/red-arrow/test/test-table.rb                  | 788 ---------------------
 ruby/red-arrow/test/test-tensor.rb                 |  56 --
 ruby/red-arrow/test/test-time.rb                   | 288 --------
 ruby/red-arrow/test/test-time32-array.rb           |  81 ---
 ruby/red-arrow/test/test-time32-data-type.rb       |  42 --
 ruby/red-arrow/test/test-time64-array.rb           |  81 ---
 ruby/red-arrow/test/test-time64-data-type.rb       |  42 --
 ruby/red-arrow/test/test-timestamp-array.rb        |  45 --
 ruby/red-arrow/test/test-timestamp-data-type.rb    |  42 --
 ruby/red-arrow/test/values/test-basic-arrays.rb    | 295 --------
 .../test/values/test-dense-union-array.rb          | 468 ------------
 ruby/red-arrow/test/values/test-list-array.rb      | 515 --------------
 .../test/values/test-sparse-union-array.rb         | 459 ------------
 ruby/red-arrow/test/values/test-struct-array.rb    | 467 ------------
 ruby/red-gandiva/.gitignore                        |  18 -
 ruby/red-gandiva/Gemfile                           |  24 -
 ruby/red-gandiva/LICENSE.txt                       | 202 ------
 ruby/red-gandiva/NOTICE.txt                        |   2 -
 ruby/red-gandiva/README.md                         |  68 --
 ruby/red-gandiva/Rakefile                          |  41 --
 ruby/red-gandiva/dependency-check/Rakefile         |  47 --
 ruby/red-gandiva/lib/gandiva.rb                    |  29 -
 ruby/red-gandiva/lib/gandiva/arrow-schema.rb       |  25 -
 ruby/red-gandiva/lib/gandiva/expression-builder.rb |  45 --
 .../lib/gandiva/expression-builder/add.rb          |  40 --
 .../gandiva/expression-builder/binary-operation.rb |  38 -
 .../lib/gandiva/expression-builder/context.rb      |  26 -
 .../lib/gandiva/expression-builder/divide.rb       |  34 -
 .../lib/gandiva/expression-builder/elsif.rb        |  36 -
 .../lib/gandiva/expression-builder/equal.rb        |  33 -
 .../lib/gandiva/expression-builder/field.rb        |  32 -
 .../lib/gandiva/expression-builder/greater-than.rb |  33 -
 .../lib/gandiva/expression-builder/if.rb           |  75 --
 .../lib/gandiva/expression-builder/less-than.rb    |  33 -
 .../lib/gandiva/expression-builder/literal.rb      |  65 --
 .../lib/gandiva/expression-builder/multiply.rb     |  34 -
 .../lib/gandiva/expression-builder/record.rb       |  45 --
 .../lib/gandiva/expression-builder/subtract.rb     |  34 -
 .../lib/gandiva/expression-builder/value.rb        |  55 --
 ruby/red-gandiva/lib/gandiva/loader.rb             |  49 --
 ruby/red-gandiva/lib/gandiva/version.rb            |  26 -
 ruby/red-gandiva/red-gandiva.gemspec               |  49 --
 .../test/expression-builder/test-add.rb            |  54 --
 .../test/expression-builder/test-record.rb         |  45 --
 ruby/red-gandiva/test/helper.rb                    |  20 -
 ruby/red-gandiva/test/run-test.rb                  |  50 --
 ruby/red-gandiva/test/test-boolean-literal-node.rb |  24 -
 ruby/red-gandiva/test/test-projector.rb            |  49 --
 ruby/red-parquet/.gitignore                        |  18 -
 ruby/red-parquet/Gemfile                           |  24 -
 ruby/red-parquet/LICENSE.txt                       | 202 ------
 ruby/red-parquet/NOTICE.txt                        |   2 -
 ruby/red-parquet/README.md                         |  52 --
 ruby/red-parquet/Rakefile                          |  41 --
 ruby/red-parquet/dependency-check/Rakefile         |  47 --
 ruby/red-parquet/lib/parquet.rb                    |  29 -
 .../lib/parquet/arrow-table-loadable.rb            |  36 -
 .../red-parquet/lib/parquet/arrow-table-savable.rb |  52 --
 ruby/red-parquet/lib/parquet/loader.rb             |  46 --
 ruby/red-parquet/lib/parquet/version.rb            |  26 -
 ruby/red-parquet/lib/parquet/writer-properties.rb  |  28 -
 ruby/red-parquet/red-parquet.gemspec               |  49 --
 ruby/red-parquet/test/helper.rb                    |  22 -
 ruby/red-parquet/test/run-test.rb                  |  50 --
 ruby/red-parquet/test/test-arrow-table.rb          |  99 ---
 ruby/red-plasma/.gitignore                         |  18 -
 ruby/red-plasma/Gemfile                            |  24 -
 ruby/red-plasma/LICENSE.txt                        | 202 ------
 ruby/red-plasma/NOTICE.txt                         |   2 -
 ruby/red-plasma/README.md                          |  58 --
 ruby/red-plasma/Rakefile                           |  41 --
 ruby/red-plasma/dependency-check/Rakefile          |  47 --
 ruby/red-plasma/lib/plasma.rb                      |  29 -
 ruby/red-plasma/lib/plasma/client.rb               |  35 -
 ruby/red-plasma/lib/plasma/loader.rb               |  35 -
 ruby/red-plasma/lib/plasma/version.rb              |  26 -
 ruby/red-plasma/red-plasma.gemspec                 |  49 --
 ruby/red-plasma/test/helper.rb                     |  25 -
 ruby/red-plasma/test/helper/omittable.rb           |  36 -
 ruby/red-plasma/test/helper/plasma-store.rb        |  57 --
 ruby/red-plasma/test/run-test.rb                   |  50 --
 ruby/red-plasma/test/test-plasma-client.rb         |  53 --
 297 files changed, 24933 deletions(-)

diff --git a/ruby/Gemfile b/ruby/Gemfile
deleted file mode 100644
index 002a2a0..0000000
--- a/ruby/Gemfile
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-source "https://rubygems.org/"
-
-gem "pkg-config"
diff --git a/ruby/README.md b/ruby/README.md
deleted file mode 100644
index fbcf615..0000000
--- a/ruby/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Apache Arrow Ruby
-
-These are the official Ruby bindings for Apache Arrow.
-
-[Red Arrow](https://github.com/apache/arrow/tree/master/ruby/red-arrow) provides the base Apache Arrow bindings.
-
-[Red Arrow CUDA](https://github.com/apache/arrow/tree/master/ruby/red-arrow-cuda) provides the Apache Arrow CUDA bindings.
-
-[Red Arrow Dataset](https://github.com/apache/arrow/tree/master/ruby/red-arrow-dataset) provides the Apache Arrow Dataset bindings.
-
-[Red Gandiva](https://github.com/apache/arrow/tree/master/ruby/red-gandiva) provides the Gandiva bindings.
-
-[Red Plasma](https://github.com/apache/arrow/tree/master/ruby/red-plasma) provides the Plasma bindings.
-
-[Red Parquet](https://github.com/apache/arrow/tree/master/ruby/red-parquet) provides the Parquet bindings.
-
-
diff --git a/ruby/Rakefile b/ruby/Rakefile
deleted file mode 100644
index 64559ef..0000000
--- a/ruby/Rakefile
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "pkg-config"
-
-base_dir = File.join(__dir__)
-
-packages = []
-Dir.glob("#{base_dir}/*/*.gemspec") do |gemspec|
-  package = File.basename(File.dirname(gemspec))
-  glib_package_name = package.gsub(/\Ared-/, "") + "-glib"
-  next unless PKGConfig.exist?(glib_package_name)
-  packages << package
-end
-
-packages.each do |package|
-  desc "Run test for #{package}"
-  task package do
-    cd(File.join(base_dir, package)) do
-      if ENV["USE_BUNDLER"]
-        sh("bundle", "exec", "rake")
-      else
-        ruby("-S", "rake")
-      end
-    end
-  end
-end
-
-sorted_packages = packages.sort_by do |package|
-  if package == "red-arrow"
-    "000-#{package}"
-  else
-    package
-  end
-end
-
-desc "Run test for all packages"
-task all: sorted_packages
-
-task default: :all
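
The `sort_by` block above exists only to force red-arrow to the front of the run order; a quick illustration in plain Ruby:

```ruby
packages = ["red-parquet", "red-arrow", "red-gandiva"]
packages.sort_by { |package| package == "red-arrow" ? "000-#{package}" : package }
# => ["red-arrow", "red-gandiva", "red-parquet"]
# "000-red-arrow" sorts before any "red-*" name, so red-arrow runs first.
```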
diff --git a/ruby/red-arrow-cuda/.gitignore b/ruby/red-arrow-cuda/.gitignore
deleted file mode 100644
index 779545d..0000000
--- a/ruby/red-arrow-cuda/.gitignore
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-/pkg/
diff --git a/ruby/red-arrow-cuda/Gemfile b/ruby/red-arrow-cuda/Gemfile
deleted file mode 100644
index 7c4cefc..0000000
--- a/ruby/red-arrow-cuda/Gemfile
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-source "https://rubygems.org/"
-
-gemspec
-
-gem "red-arrow", path: "../red-arrow"
diff --git a/ruby/red-arrow-cuda/LICENSE.txt b/ruby/red-arrow-cuda/LICENSE.txt
deleted file mode 100644
index d645695..0000000
--- a/ruby/red-arrow-cuda/LICENSE.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/ruby/red-arrow-cuda/NOTICE.txt b/ruby/red-arrow-cuda/NOTICE.txt
deleted file mode 100644
index e08aeda..0000000
--- a/ruby/red-arrow-cuda/NOTICE.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Apache Arrow
-Copyright 2016 The Apache Software Foundation
diff --git a/ruby/red-arrow-cuda/README.md b/ruby/red-arrow-cuda/README.md
deleted file mode 100644
index f05e664..0000000
--- a/ruby/red-arrow-cuda/README.md
+++ /dev/null
@@ -1,60 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Red Arrow CUDA - Apache Arrow CUDA Ruby
-
-Red Arrow CUDA provides the Ruby bindings for Apache Arrow CUDA. It is based on GObject Introspection.
-
-[Apache Arrow CUDA](https://arrow.apache.org/) is an in-memory columnar data store on the GPU.
-
-[GObject Introspection](https://wiki.gnome.org/action/show/Projects/GObjectIntrospection) is middleware for building language bindings for C libraries. It can generate language bindings automatically at runtime.
-
-Red Arrow CUDA uses [Apache Arrow CUDA GLib](https://github.com/apache/arrow/tree/master/c_glib) and the [gobject-introspection gem](https://rubygems.org/gems/gobject-introspection) to generate the Ruby bindings for Apache Arrow CUDA.
-
-Apache Arrow CUDA GLib is a C wrapper for [Apache Arrow CUDA C++](https://github.com/apache/arrow/tree/master/cpp). GObject Introspection can't use Apache Arrow CUDA C++ directly, so Apache Arrow CUDA GLib acts as a bridge between the two.
-
-The gobject-introspection gem provides the Ruby bindings for GObject Introspection; Red Arrow CUDA uses GObject Introspection through it.
-
-## Install
-
-Install Apache Arrow CUDA GLib before installing Red Arrow CUDA, and Apache Arrow GLib before installing Red Arrow. See the [Apache Arrow install document](https://arrow.apache.org/install/) for details.
-
-Install Red Arrow CUDA after Apache Arrow CUDA GLib is in place:
-
-```text
-% gem install red-arrow-cuda
-```
-
-## Usage
-
-```ruby
-require "arrow-cuda"
-
-manager = ArrowCUDA::DeviceManager.new
-if manager.n_devices.zero?
-  raise "No GPU is found"
-end
-
-context = manager[0]
-buffer = ArrowCUDA::Buffer.new(context, 128)
-ArrowCUDA::BufferOutputStream.open(buffer) do |stream|
-  stream.write("Hello World")
-end
-puts buffer.copy_to_host(0, 11) # => "Hello World"
-```
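
The same API scales to multiple GPUs; a minimal sketch, assuming only the `n_devices`, `manager[i]`, `Buffer`, `BufferOutputStream`, and `copy_to_host` calls shown in the README above:

```ruby
require "arrow-cuda"

manager = ArrowCUDA::DeviceManager.new
raise "No GPU is found" if manager.n_devices.zero?

# Round-trip a small payload through a 128-byte device buffer on every
# detected GPU, using only the calls from the example above.
manager.n_devices.times do |i|
  message = "device #{i}"
  buffer = ArrowCUDA::Buffer.new(manager[i], 128)
  ArrowCUDA::BufferOutputStream.open(buffer) do |stream|
    stream.write(message)
  end
  puts buffer.copy_to_host(0, message.bytesize) # => "device 0", ...
end
```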
diff --git a/ruby/red-arrow-cuda/Rakefile b/ruby/red-arrow-cuda/Rakefile
deleted file mode 100644
index 2bbe6e7..0000000
--- a/ruby/red-arrow-cuda/Rakefile
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "rubygems"
-require "bundler/gem_helper"
-
-base_dir = File.join(File.dirname(__FILE__))
-
-helper = Bundler::GemHelper.new(base_dir)
-helper.install
-
-release_task = Rake::Task["release"]
-release_task.prerequisites.replace(["build", "release:rubygem_push"])
-
-desc "Run tests"
-task :test do
-  cd(base_dir) do
-    cd("dependency-check") do
-      ruby("-S", "rake")
-    end
-    ruby("test/run-test.rb")
-  end
-end
-
-task default: :test
diff --git a/ruby/red-arrow-cuda/dependency-check/Rakefile b/ruby/red-arrow-cuda/dependency-check/Rakefile
deleted file mode 100644
index 518c1a6..0000000
--- a/ruby/red-arrow-cuda/dependency-check/Rakefile
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "pkg-config"
-require "native-package-installer"
-require_relative "../lib/arrow-cuda/version"
-
-case RUBY_PLATFORM
-when /mingw|mswin/
-  task :default => "nothing"
-else
-  task :default => "dependency:check"
-end
-
-task :nothing do
-end
-
-namespace :dependency do
-  desc "Check dependency"
-  task :check do
-    unless PKGConfig.check_version?("arrow-cuda-glib",
-                                    ArrowCUDA::Version::MAJOR,
-                                    ArrowCUDA::Version::MINOR,
-                                    ArrowCUDA::Version::MICRO)
-      unless NativePackageInstaller.install(:debian => "libarrow-cuda-glib-dev",
-                                            :redhat => "arrow-cuda-glib-devel")
-        exit(false)
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow-cuda/lib/arrow-cuda.rb b/ruby/red-arrow-cuda/lib/arrow-cuda.rb
deleted file mode 100644
index 1fc13d0..0000000
--- a/ruby/red-arrow-cuda/lib/arrow-cuda.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow"
-
-require "arrow-cuda/version"
-
-require "arrow-cuda/loader"
-
-module ArrowCUDA
-  class Error < StandardError
-  end
-
-  Loader.load
-end
diff --git a/ruby/red-arrow-cuda/lib/arrow-cuda/device-manager.rb b/ruby/red-arrow-cuda/lib/arrow-cuda/device-manager.rb
deleted file mode 100644
index bbef749..0000000
--- a/ruby/red-arrow-cuda/lib/arrow-cuda/device-manager.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowCUDA
-  class DeviceManager
-    # Experimental.
-    #
-    # Can we think of the device manager as a container of contexts?
-    alias_method :[], :get_context
-  end
-end
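
Given the alias above, the two call styles below are interchangeable; a minimal sketch, relying only on the GLib-provided `get_context` that the alias wraps:

```ruby
require "arrow-cuda"

manager = ArrowCUDA::DeviceManager.new
context_a = manager.get_context(0) # the GLib-provided method
context_b = manager[0]             # the experimental alias defined above
```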
diff --git a/ruby/red-arrow-cuda/lib/arrow-cuda/loader.rb b/ruby/red-arrow-cuda/lib/arrow-cuda/loader.rb
deleted file mode 100644
index 6b2afc4..0000000
--- a/ruby/red-arrow-cuda/lib/arrow-cuda/loader.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowCUDA
-  class Loader < GObjectIntrospection::Loader
-    class << self
-      def load
-        super("ArrowCUDA", ArrowCUDA)
-      end
-    end
-
-    private
-    def post_load(repository, namespace)
-      require_libraries
-    end
-
-    def require_libraries
-      require "arrow-cuda/device-manager"
-    end
-  end
-end
diff --git a/ruby/red-arrow-cuda/lib/arrow-cuda/version.rb b/ruby/red-arrow-cuda/lib/arrow-cuda/version.rb
deleted file mode 100644
index d4031e5..0000000
--- a/ruby/red-arrow-cuda/lib/arrow-cuda/version.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowCUDA
-  VERSION = "4.0.0-SNAPSHOT"
-
-  module Version
-    numbers, TAG = VERSION.split("-")
-    MAJOR, MINOR, MICRO = numbers.split(".").collect(&:to_i)
-    STRING = VERSION
-  end
-end
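
For reference, here is what that destructuring yields for the snapshot version string above (plain Ruby, no Arrow install needed):

```ruby
numbers, tag = "4.0.0-SNAPSHOT".split("-")
numbers                            # => "4.0.0"
tag                                # => "SNAPSHOT"
numbers.split(".").collect(&:to_i) # => [4, 0, 0]
# For a release string such as "4.0.0" there is no "-" to split on,
# so TAG ends up nil.
```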
diff --git a/ruby/red-arrow-cuda/red-arrow-cuda.gemspec b/ruby/red-arrow-cuda/red-arrow-cuda.gemspec
deleted file mode 100644
index 7bb34c6..0000000
--- a/ruby/red-arrow-cuda/red-arrow-cuda.gemspec
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require_relative "lib/arrow-cuda/version"
-
-Gem::Specification.new do |spec|
-  spec.name = "red-arrow-cuda"
-  version_components = [
-    ArrowCUDA::Version::MAJOR.to_s,
-    ArrowCUDA::Version::MINOR.to_s,
-    ArrowCUDA::Version::MICRO.to_s,
-    ArrowCUDA::Version::TAG,
-  ]
-  spec.version = version_components.compact.join(".")
-  spec.homepage = "https://arrow.apache.org/"
-  spec.authors = ["Apache Arrow Developers"]
-  spec.email = ["dev@arrow.apache.org"]
-
-  spec.summary = "Red Arrow CUDA provides the Ruby bindings for Apache Arrow CUDA"
-  spec.description =
-    "Apache Arrow CUDA is a common in-memory columnar data store on CUDA. " +
-    "It's useful for sharing and processing large data."
-  spec.license = "Apache-2.0"
-  spec.files = ["README.md", "Rakefile", "Gemfile", "#{spec.name}.gemspec"]
-  spec.files += ["LICENSE.txt", "NOTICE.txt"]
-  spec.files += Dir.glob("lib/**/*.rb")
-  spec.test_files += Dir.glob("test/**/*")
-  spec.extensions = ["dependency-check/Rakefile"]
-
-  spec.add_runtime_dependency("red-arrow", "= #{spec.version}")
-
-  spec.add_development_dependency("bundler")
-  spec.add_development_dependency("rake")
-  spec.add_development_dependency("test-unit")
-end
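
The `compact.join(".")` above is what turns those components into the gem version; a quick illustration of both cases:

```ruby
["4", "0", "0", "SNAPSHOT"].compact.join(".") # => "4.0.0.SNAPSHOT" (snapshot gem)
["4", "0", "0", nil].compact.join(".")        # => "4.0.0" (TAG is nil for a release)
```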
diff --git a/ruby/red-arrow-cuda/test/helper.rb b/ruby/red-arrow-cuda/test/helper.rb
deleted file mode 100644
index 045eb10..0000000
--- a/ruby/red-arrow-cuda/test/helper.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow-cuda"
-
-require "test-unit"
diff --git a/ruby/red-arrow-cuda/test/run-test.rb b/ruby/red-arrow-cuda/test/run-test.rb
deleted file mode 100755
index 48d2c49..0000000
--- a/ruby/red-arrow-cuda/test/run-test.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env ruby
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-$VERBOSE = true
-
-require "pathname"
-
-(ENV["ARROW_DLL_PATH"] || "").split(File::PATH_SEPARATOR).each do |path|
-  RubyInstaller::Runtime.add_dll_directory(path)
-end
-
-base_dir = Pathname.new(__dir__).parent.expand_path
-arrow_base_dir = base_dir.parent + "red-arrow"
-
-lib_dir = base_dir + "lib"
-test_dir = base_dir + "test"
-
-arrow_lib_dir = arrow_base_dir + "lib"
-arrow_ext_dir = arrow_base_dir + "ext" + "arrow"
-
-build_dir = ENV["BUILD_DIR"]
-if build_dir
-  arrow_build_dir = Pathname.new(build_dir) + "red-arrow"
-else
-  arrow_build_dir = arrow_ext_dir
-end
-
-$LOAD_PATH.unshift(arrow_build_dir.to_s)
-$LOAD_PATH.unshift(arrow_lib_dir.to_s)
-$LOAD_PATH.unshift(lib_dir.to_s)
-
-require_relative "helper"
-
-exit(Test::Unit::AutoRunner.run(true, test_dir.to_s))
diff --git a/ruby/red-arrow-cuda/test/test-cuda.rb b/ruby/red-arrow-cuda/test/test-cuda.rb
deleted file mode 100644
index a48b687..0000000
--- a/ruby/red-arrow-cuda/test/test-cuda.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-class TestCUDA < Test::Unit::TestCase
-  def setup
-    @manager = ArrowCUDA::DeviceManager.new
-    omit("At least one GPU is required") if @manager.n_devices.zero?
-    @context = @manager[0]
-  end
-
-  sub_test_case("BufferOutputStream") do
-    def setup
-      super
-      @buffer = ArrowCUDA::Buffer.new(@context, 128)
-    end
-
-    def test_new
-      ArrowCUDA::BufferOutputStream.open(@buffer) do |stream|
-        stream.write("Hello World")
-      end
-      assert_equal("Hello World", @buffer.copy_to_host(0, 11).to_s)
-    end
-  end
-end
diff --git a/ruby/red-arrow-dataset/.gitignore b/ruby/red-arrow-dataset/.gitignore
deleted file mode 100644
index 779545d..0000000
--- a/ruby/red-arrow-dataset/.gitignore
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-/pkg/
diff --git a/ruby/red-arrow-dataset/Gemfile b/ruby/red-arrow-dataset/Gemfile
deleted file mode 100644
index 7c4cefc..0000000
--- a/ruby/red-arrow-dataset/Gemfile
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-source "https://rubygems.org/"
-
-gemspec
-
-gem "red-arrow", path: "../red-arrow"
diff --git a/ruby/red-arrow-dataset/LICENSE.txt b/ruby/red-arrow-dataset/LICENSE.txt
deleted file mode 100644
index d645695..0000000
--- a/ruby/red-arrow-dataset/LICENSE.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/ruby/red-arrow-dataset/NOTICE.txt b/ruby/red-arrow-dataset/NOTICE.txt
deleted file mode 100644
index e08aeda..0000000
--- a/ruby/red-arrow-dataset/NOTICE.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Apache Arrow
-Copyright 2016 The Apache Software Foundation
diff --git a/ruby/red-arrow-dataset/README.md b/ruby/red-arrow-dataset/README.md
deleted file mode 100644
index b48ef0b..0000000
--- a/ruby/red-arrow-dataset/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Red Arrow Dataset - Apache Arrow Dataset Ruby
-
-Red Arrow Dataset provides the Ruby bindings of Apache Arrow Dataset. It is built on GObject Introspection.
-
-[Apache Arrow Dataset](https://arrow.apache.org/) is the Apache Arrow component for reading and writing semantic datasets stored in different locations and formats.
-
-[GObject Introspection](https://wiki.gnome.org/action/show/Projects/GObjectIntrospection) is middleware for building language bindings for C libraries. It can generate language bindings automatically at runtime.
-
-Red Arrow Dataset uses [Apache Arrow Dataset GLib](https://github.com/apache/arrow/tree/master/c_glib) and the [gobject-introspection gem](https://rubygems.org/gems/gobject-introspection) to generate the Ruby bindings of Apache Arrow Dataset.
-
-Apache Arrow Dataset GLib is a C wrapper for [Apache Arrow Dataset C++](https://github.com/apache/arrow/tree/master/cpp). GObject Introspection can't use Apache Arrow Dataset C++ directly, so Apache Arrow Dataset GLib acts as a bridge between the two.
-
-The gobject-introspection gem provides the Ruby bindings of GObject Introspection; Red Arrow Dataset uses GObject Introspection through this gem.
-
-## Install
-
-Install Apache Arrow Dataset GLib before installing Red Arrow Dataset, and Apache Arrow GLib before installing Red Arrow. See the [Apache Arrow install document](https://arrow.apache.org/install/) for details.
-
-Install Red Arrow Dataset after installing Apache Arrow Dataset GLib:
-
-```console
-$ gem install red-arrow-dataset
-```
-
-## Usage
-
-```ruby
-require "arrow-dataset"
-
-# TODO
-```
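The deleted README leaves its usage section as a `TODO`; the test suite removed later in this diff (`test/test-in-memory-scan-task.rb`) suggests a minimal sketch along these lines, assuming Apache Arrow Dataset GLib and the gem are installed:

```ruby
require "arrow-dataset"

# Build a record batch from plain Ruby arrays.
record_batches = [
  Arrow::RecordBatch.new(visible: [true, false, true],
                         point: [1, 2, 3]),
]

# Wrap the batches in an in-memory scan task and read them back.
scan_task = ArrowDataset::InMemoryScanTask.new(record_batches)
scan_task.execute.each do |record_batch|
  p record_batch
end
```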
diff --git a/ruby/red-arrow-dataset/Rakefile b/ruby/red-arrow-dataset/Rakefile
deleted file mode 100644
index 2bbe6e7..0000000
--- a/ruby/red-arrow-dataset/Rakefile
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "rubygems"
-require "bundler/gem_helper"
-
-base_dir = File.join(File.dirname(__FILE__))
-
-helper = Bundler::GemHelper.new(base_dir)
-helper.install
-
-release_task = Rake::Task["release"]
-release_task.prerequisites.replace(["build", "release:rubygem_push"])
-
-desc "Run tests"
-task :test do
-  cd(base_dir) do
-    cd("dependency-check") do
-      ruby("-S", "rake")
-    end
-    ruby("test/run-test.rb")
-  end
-end
-
-task default: :test
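Both the dependency check and the test suite hang off the default task defined above, so a full verification of the gem is a single invocation:

```console
$ rake          # or, equivalently: rake test
```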
diff --git a/ruby/red-arrow-dataset/dependency-check/Rakefile b/ruby/red-arrow-dataset/dependency-check/Rakefile
deleted file mode 100644
index df2e249..0000000
--- a/ruby/red-arrow-dataset/dependency-check/Rakefile
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "pkg-config"
-require "native-package-installer"
-require_relative "../lib/arrow-dataset/version"
-
-case RUBY_PLATFORM
-when /mingw|mswin/
-  task :default => "nothing"
-else
-  task :default => "dependency:check"
-end
-
-task :nothing do
-end
-
-namespace :dependency do
-  desc "Check dependency"
-  task :check do
-    unless PKGConfig.check_version?("arrow-dataset-glib",
-                                    ArrowDataset::Version::MAJOR,
-                                    ArrowDataset::Version::MINOR,
-                                    ArrowDataset::Version::MICRO)
-      unless NativePackageInstaller.install(:debian => "libarrow-dataset-glib-dev",
-                                            :redhat => "arrow-dataset-glib-devel")
-        exit(false)
-      end
-    end
-  end
-end
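The task above first asks pkg-config whether an `arrow-dataset-glib` matching the gem's own version is present and, failing that, falls back to the platform package manager via native-package-installer. A rough manual equivalent, using the package names from the Rakefile (the exact install tool varies by distribution):

```console
$ sudo apt install libarrow-dataset-glib-dev    # Debian family
$ sudo dnf install arrow-dataset-glib-devel     # Red Hat family
```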
diff --git a/ruby/red-arrow-dataset/lib/arrow-dataset.rb b/ruby/red-arrow-dataset/lib/arrow-dataset.rb
deleted file mode 100644
index fe4f2d5..0000000
--- a/ruby/red-arrow-dataset/lib/arrow-dataset.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow"
-
-require "arrow-dataset/version"
-
-require "arrow-dataset/loader"
-
-module ArrowDataset
-  class Error < StandardError
-  end
-
-  Loader.load
-end
diff --git a/ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-fragment.rb b/ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-fragment.rb
deleted file mode 100644
index 917d6c7..0000000
--- a/ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-fragment.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowDataset
-  class InMemoryFragment
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-    def initialize(schema, record_batches)
-      record_batches = record_batches.collect do |record_batch|
-        unless record_batch.is_a?(Arrow::RecordBatch)
-          record_batch = Arrow::RecordBatch.new(record_batch)
-        end
-        record_batch
-      end
-      initialize_raw(schema, record_batches)
-    end
-  end
-end
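The wrapped `initialize` above lets callers pass raw column data instead of pre-built `Arrow::RecordBatch` objects; anything that is not already a record batch is handed to `Arrow::RecordBatch.new`. A minimal sketch of that coercion (the column names and values are illustrative):

```ruby
require "arrow-dataset"

raw_data = {visible: [true, false, true], point: [1, 2, 3]}
template = Arrow::RecordBatch.new(raw_data)
# The plain Hash is coerced into an Arrow::RecordBatch by the wrapper above.
fragment = ArrowDataset::InMemoryFragment.new(template.schema, [raw_data])
```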
diff --git a/ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-scan-task.rb b/ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-scan-task.rb
deleted file mode 100644
index 5e127e1..0000000
--- a/ruby/red-arrow-dataset/lib/arrow-dataset/in-memory-scan-task.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowDataset
-  class InMemoryScanTask
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-    def initialize(record_batches, **options)
-      record_batches = record_batches.collect do |record_batch|
-        unless record_batch.is_a?(Arrow::RecordBatch)
-          record_batch = Arrow::RecordBatch.new(record_batch)
-        end
-        record_batch
-      end
-      options[:schema] ||= record_batches.first.schema
-      fragment = options.delete(:fragment)
-      fragment ||= InMemoryFragment.new(options[:schema], record_batches)
-      initialize_raw(record_batches, options, fragment)
-    end
-  end
-end
diff --git a/ruby/red-arrow-dataset/lib/arrow-dataset/loader.rb b/ruby/red-arrow-dataset/lib/arrow-dataset/loader.rb
deleted file mode 100644
index fcac52d..0000000
--- a/ruby/red-arrow-dataset/lib/arrow-dataset/loader.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowDataset
-  class Loader < GObjectIntrospection::Loader
-    class << self
-      def load
-        super("ArrowDataset", ArrowDataset)
-      end
-    end
-
-    private
-    def post_load(repository, namespace)
-      require_libraries
-    end
-
-    def require_libraries
-      require "arrow-dataset/in-memory-scan-task"
-      require "arrow-dataset/scan-options"
-    end
-  end
-end
diff --git a/ruby/red-arrow-dataset/lib/arrow-dataset/scan-options.rb b/ruby/red-arrow-dataset/lib/arrow-dataset/scan-options.rb
deleted file mode 100644
index 1467743..0000000
--- a/ruby/red-arrow-dataset/lib/arrow-dataset/scan-options.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowDataset
-  class ScanOptions
-    class << self
-      def try_convert(value)
-        case value
-        when Hash
-          return nil unless value.key?(:schema)
-          options = new(value[:schema])
-          value.each do |name, value|
-            next if name == :schema
-            options.__send__("#{name}=", value)
-          end
-          options
-        else
-          nil
-        end
-      end
-    end
-  end
-end
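`try_convert` turns a `Hash` that contains a `:schema` key into a `ScanOptions`, assigning every other entry through the matching attribute writer. The removed test (`test/test-scan-options.rb`, later in this diff) exercises it like this:

```ruby
require "arrow-dataset"

record_batch = Arrow::RecordBatch.new(visible: [true, false, true],
                                      point: [1, 2, 3])
options = ArrowDataset::ScanOptions.try_convert(schema: record_batch.schema,
                                                batch_size: 1024)
options.batch_size # => 1024
```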
diff --git a/ruby/red-arrow-dataset/lib/arrow-dataset/version.rb b/ruby/red-arrow-dataset/lib/arrow-dataset/version.rb
deleted file mode 100644
index 0f1e354..0000000
--- a/ruby/red-arrow-dataset/lib/arrow-dataset/version.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module ArrowDataset
-  VERSION = "4.0.0-SNAPSHOT"
-
-  module Version
-    numbers, TAG = VERSION.split("-")
-    MAJOR, MINOR, MICRO = numbers.split(".").collect(&:to_i)
-    STRING = VERSION
-  end
-end
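For illustration, the string manipulation above decomposes the snapshot version as follows:

```ruby
numbers, tag = "4.0.0-SNAPSHOT".split("-") # => ["4.0.0", "SNAPSHOT"]
numbers.split(".").collect(&:to_i)         # => [4, 0, 0]  (MAJOR, MINOR, MICRO)
```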
diff --git a/ruby/red-arrow-dataset/red-arrow-dataset.gemspec b/ruby/red-arrow-dataset/red-arrow-dataset.gemspec
deleted file mode 100644
index 0a60925..0000000
--- a/ruby/red-arrow-dataset/red-arrow-dataset.gemspec
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require_relative "lib/arrow-dataset/version"
-
-Gem::Specification.new do |spec|
-  spec.name = "red-arrow-dataset"
-  version_components = [
-    ArrowDataset::Version::MAJOR.to_s,
-    ArrowDataset::Version::MINOR.to_s,
-    ArrowDataset::Version::MICRO.to_s,
-    ArrowDataset::Version::TAG,
-  ]
-  spec.version = version_components.compact.join(".")
-  spec.homepage = "https://arrow.apache.org/"
-  spec.authors = ["Apache Arrow Developers"]
-  spec.email = ["dev@arrow.apache.org"]
-
-  spec.summary = "Red Arrow Dataset provides the Ruby bindings of Apache Arrow Dataset"
-  spec.description =
-    "Apache Arrow Dataset is the Apache Arrow component for reading and writing " +
-    "semantic datasets stored in different locations and formats."
-  spec.license = "Apache-2.0"
-  spec.files = ["README.md", "Rakefile", "Gemfile", "#{spec.name}.gemspec"]
-  spec.files += ["LICENSE.txt", "NOTICE.txt"]
-  spec.files += Dir.glob("lib/**/*.rb")
-  spec.test_files += Dir.glob("test/**/*")
-  spec.extensions = ["dependency-check/Rakefile"]
-
-  spec.add_runtime_dependency("red-arrow", "= #{spec.version}")
-
-  spec.add_development_dependency("bundler")
-  spec.add_development_dependency("rake")
-  spec.add_development_dependency("test-unit")
-end
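Note how the gemspec reassembles the version: all present components are joined with dots, so a `TAG` such as `SNAPSHOT` produces a RubyGems-style prerelease version, while a release build (no tag) drops it:

```ruby
["4", "0", "0", "SNAPSHOT"].compact.join(".") # => "4.0.0.SNAPSHOT"
["4", "0", "0", nil].compact.join(".")        # => "4.0.0"
```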
diff --git a/ruby/red-arrow-dataset/test/helper.rb b/ruby/red-arrow-dataset/test/helper.rb
deleted file mode 100644
index 795df3b..0000000
--- a/ruby/red-arrow-dataset/test/helper.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow-dataset"
-
-require "test-unit"
diff --git a/ruby/red-arrow-dataset/test/run-test.rb b/ruby/red-arrow-dataset/test/run-test.rb
deleted file mode 100755
index 48d2c49..0000000
--- a/ruby/red-arrow-dataset/test/run-test.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env ruby
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-$VERBOSE = true
-
-require "pathname"
-
-(ENV["ARROW_DLL_PATH"] || "").split(File::PATH_SEPARATOR).each do |path|
-  RubyInstaller::Runtime.add_dll_directory(path)
-end
-
-base_dir = Pathname.new(__dir__).parent.expand_path
-arrow_base_dir = base_dir.parent + "red-arrow"
-
-lib_dir = base_dir + "lib"
-test_dir = base_dir + "test"
-
-arrow_lib_dir = arrow_base_dir + "lib"
-arrow_ext_dir = arrow_base_dir + "ext" + "arrow"
-
-build_dir = ENV["BUILD_DIR"]
-if build_dir
-  arrow_build_dir = Pathname.new(build_dir) + "red-arrow"
-else
-  arrow_build_dir = arrow_ext_dir
-end
-
-$LOAD_PATH.unshift(arrow_build_dir.to_s)
-$LOAD_PATH.unshift(arrow_lib_dir.to_s)
-$LOAD_PATH.unshift(lib_dir.to_s)
-
-require_relative "helper"
-
-exit(Test::Unit::AutoRunner.run(true, test_dir.to_s))
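The launcher above wires the sibling `red-arrow` checkout and an optional out-of-tree build into `$LOAD_PATH`, so the suite runs either in place or against a separate build directory (`BUILD_DIR` is the variable read in the script; the `/tmp` path below is illustrative):

```console
$ ruby test/run-test.rb
$ BUILD_DIR=/tmp/red-arrow-build ruby test/run-test.rb
```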
diff --git a/ruby/red-arrow-dataset/test/test-in-memory-scan-task.rb b/ruby/red-arrow-dataset/test/test-in-memory-scan-task.rb
deleted file mode 100644
index 37f041d..0000000
--- a/ruby/red-arrow-dataset/test/test-in-memory-scan-task.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-class TestInMemoryScanTask < Test::Unit::TestCase
-  def setup
-    @record_batches = [
-      Arrow::RecordBatch.new(visible: [true, false, true],
-                             point: [1, 2, 3]),
-    ]
-  end
-
-  sub_test_case(".new") do
-    test("[[Arrow::RecordBatch]]") do
-      scan_task = ArrowDataset::InMemoryScanTask.new(@record_batches)
-      assert_equal(@record_batches,
-                   scan_task.execute.to_a)
-    end
-  end
-end
diff --git a/ruby/red-arrow-dataset/test/test-scan-options.rb b/ruby/red-arrow-dataset/test/test-scan-options.rb
deleted file mode 100644
index a9a947f..0000000
--- a/ruby/red-arrow-dataset/test/test-scan-options.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-class TestScanOptions < Test::Unit::TestCase
-  def setup
-    @record_batches = [
-      Arrow::RecordBatch.new(visible: [true, false, true],
-                             point: [1, 2, 3]),
-    ]
-    @schema = @record_batches.first.schema
-  end
-
-  sub_test_case(".try_convert") do
-    def test_hash
-      batch_size = 1024
-      context = ArrowDataset::ScanOptions.try_convert(schema: @schema,
-                                                      batch_size: batch_size)
-      assert_equal([@schema, batch_size],
-                   [context.schema, context.batch_size])
-    end
-  end
-end
diff --git a/ruby/red-arrow/.gitignore b/ruby/red-arrow/.gitignore
deleted file mode 100644
index e41483f..0000000
--- a/ruby/red-arrow/.gitignore
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-/.yardoc/
-/doc/reference/
-/ext/arrow/Makefile
-/ext/arrow/mkmf.log
-/pkg/
diff --git a/ruby/red-arrow/.yardopts b/ruby/red-arrow/.yardopts
deleted file mode 100644
index 67159b1..0000000
--- a/ruby/red-arrow/.yardopts
+++ /dev/null
@@ -1,6 +0,0 @@
---output-dir doc/reference
---markup markdown
---no-private
-lib/**/*.rb
--
-doc/text/*
diff --git a/ruby/red-arrow/Gemfile b/ruby/red-arrow/Gemfile
deleted file mode 100644
index 3907918..0000000
--- a/ruby/red-arrow/Gemfile
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-source "https://rubygems.org/"
-
-gemspec
diff --git a/ruby/red-arrow/LICENSE.txt b/ruby/red-arrow/LICENSE.txt
deleted file mode 100644
index d645695..0000000
--- a/ruby/red-arrow/LICENSE.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
   [Apache License, Version 2.0 — full text omitted as a verbatim duplicate of the LICENSE.txt reproduced earlier in this diff.]
diff --git a/ruby/red-arrow/NOTICE.txt b/ruby/red-arrow/NOTICE.txt
deleted file mode 100644
index e08aeda..0000000
--- a/ruby/red-arrow/NOTICE.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Apache Arrow
-Copyright 2016 The Apache Software Foundation
diff --git a/ruby/red-arrow/README.md b/ruby/red-arrow/README.md
deleted file mode 100644
index 20ca83f..0000000
--- a/ruby/red-arrow/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Red Arrow - Apache Arrow Ruby
-
-Red Arrow provides the Ruby bindings of Apache Arrow. It is built on GObject Introspection.
-
-[Apache Arrow](https://arrow.apache.org/) is an in-memory columnar data format used by many products for data analytics.
-
-[GObject Introspection](https://wiki.gnome.org/action/show/Projects/GObjectIntrospection) is middleware for building language bindings for C libraries. It can generate language bindings automatically at runtime.
-
-Red Arrow uses [Apache Arrow GLib](https://github.com/apache/arrow/tree/master/c_glib) and the [gobject-introspection gem](https://rubygems.org/gems/gobject-introspection) to generate the Ruby bindings of Apache Arrow.
-
-Apache Arrow GLib is a C wrapper for [Apache Arrow C++](https://github.com/apache/arrow/tree/master/cpp). GObject Introspection can't use Apache Arrow C++ directly, so Apache Arrow GLib acts as a bridge between the two.
-
-The gobject-introspection gem provides the Ruby bindings of GObject Introspection; Red Arrow uses GObject Introspection through this gem.
-
-## Install
-
-Install Apache Arrow GLib before installing Red Arrow. See the [Apache Arrow install document](https://arrow.apache.org/install/) for details.
-
-Install Red Arrow after installing Apache Arrow GLib:
-
-```console
-$ gem install red-arrow
-```
-
-## Usage
-
-```ruby
-require "arrow"
-
-table = Arrow::Table.load("/dev/shm/data.arrow")
-# Process data in table
-table.save("/dev/shm/data-processed.arrow")
-```
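For data that does not yet exist on disk, a table can also be built directly from Ruby arrays before saving. A minimal sketch (the column names and values are illustrative):

```ruby
require "arrow"

table = Arrow::Table.new(visible: [true, false, true],
                         point: [1, 2, 3])
table.save("/dev/shm/data.arrow")
```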
diff --git a/ruby/red-arrow/Rakefile b/ruby/red-arrow/Rakefile
deleted file mode 100644
index dd2c310..0000000
--- a/ruby/red-arrow/Rakefile
+++ /dev/null
@@ -1,100 +0,0 @@
-# -*- ruby -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "bundler/gem_helper"
-require "rake/clean"
-require "yard"
-
-base_dir = File.join(__dir__)
-
-helper = Bundler::GemHelper.new(base_dir)
-helper.install
-spec = helper.gemspec
-
-release_task = Rake::Task["release"]
-release_task.prerequisites.replace(["build", "release:rubygem_push"])
-
-def run_extconf(build_dir, extension_dir, *arguments)
-  cd(build_dir) do
-    ruby(File.join(extension_dir, "extconf.rb"),
-         *arguments)
-  end
-end
-
-spec.extensions.each do |extension|
-  extension_dir = File.join(base_dir, File.dirname(extension))
-  build_dir = ENV["BUILD_DIR"]
-  if build_dir
-    build_dir = File.join(build_dir, "red-arrow")
-    directory build_dir
-  else
-    build_dir = extension_dir
-  end
-  CLOBBER << File.join(build_dir, "Makefile")
-  CLOBBER << File.join(build_dir, "mkmf.log")
-
-  makefile = File.join(build_dir, "Makefile")
-  file makefile => build_dir do
-    run_extconf(build_dir, extension_dir)
-  end
-
-  desc "Configure"
-  task :configure => build_dir do
-    run_extconf(build_dir, extension_dir)
-  end
-
-  desc "Compile"
-  task :compile => makefile do
-    cd(build_dir) do
-      sh("make")
-    end
-  end
-
-  task :clean do
-    cd(build_dir) do
-      sh("make", "clean") if File.exist?("Makefile")
-    end
-  end
-end
-
-desc "Run tests"
-task :test do
-  cd(base_dir) do
-    ruby("test/run-test.rb")
-  end
-end
-
-task default: :test
-
-desc "Run benchmarks"
-task :benchmark do
-  benchmarks = if ENV["BENCHMARKS"]
-                 ENV["BENCHMARKS"].split
-               else
-                 FileList["benchmark/{,*/**/}*.yml"]
-               end
-  cd(base_dir) do
-    benchmarks.each do |benchmark|
-      sh("benchmark-driver", benchmark)
-    end
-  end
-end
-
-YARD::Rake::YardocTask.new do |task|
-end
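The tasks above support both in-place and out-of-tree extension builds; a typical development cycle with them looks like:

```console
$ rake configure   # run ext/arrow/extconf.rb
$ rake compile     # make the C extension
$ rake test        # run test/run-test.rb
```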
diff --git a/ruby/red-arrow/benchmark/raw-records/boolean.yml b/ruby/red-arrow/benchmark/raw-records/boolean.yml
deleted file mode 100644
index 5e2551e..0000000
--- a/ruby/red-arrow/benchmark/raw-records/boolean.yml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = :boolean
-
-  fields = {}
-  arrays = {}
-  n_columns.times do |i|
-    column_name = "column_#{i}"
-    fields[column_name] = type
-    arrays[column_name] = n_rows.times.map { Faker::Boolean.boolean }
-  end
-  record_batch = Arrow::RecordBatch.new(fields, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        record << columns[j][i]
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
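These YAML files are benchmark-driver definitions; the `benchmark` task in the red-arrow Rakefile above feeds every `benchmark/**/*.yml` to `benchmark-driver`, and a single definition can also be run on its own:

```console
$ benchmark-driver benchmark/raw-records/boolean.yml
```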
diff --git a/ruby/red-arrow/benchmark/raw-records/decimal128.yml b/ruby/red-arrow/benchmark/raw-records/decimal128.yml
deleted file mode 100644
index 367e7c7..0000000
--- a/ruby/red-arrow/benchmark/raw-records/decimal128.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = Arrow::Decimal128DataType.new(10, 5)
-
-  fields = {}
-  arrays = {}
-  n_columns.times do |i|
-    column_name = "column_#{i}"
-    fields[column_name] = type
-    arrays[column_name] = n_rows.times.map do
-      Faker::Number.decimal(l_digits: 10, r_digits: 5)
-    end
-  end
-  record_batch = Arrow::RecordBatch.new(fields, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        x = columns[j][i]
-        record << BigDecimal(x.to_s)
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
diff --git a/ruby/red-arrow/benchmark/raw-records/dictionary.yml b/ruby/red-arrow/benchmark/raw-records/dictionary.yml
deleted file mode 100644
index 151bb41..0000000
--- a/ruby/red-arrow/benchmark/raw-records/dictionary.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = Arrow::DictionaryDataType.new(:int8, :string, true)
-
-  fields = n_columns.times.map {|i| ["column_#{i}".to_sym, type] }.to_h
-  schema = Arrow::Schema.new(**fields)
-  dictionary = Arrow::StringArray.new(
-    100.times.map { Faker::Book.genre }.uniq.sort
-  )
-  indices = Arrow::Int8Array.new(
-    n_rows.times.map {
-      Faker::Number.within(range: 0 ... dictionary.length)
-    }
-  )
-  arrays = n_columns.times.map do
-    Arrow::DictionaryArray.new(
-      type,
-      indices,
-      dictionary,
-    )
-  end
-  record_batch = Arrow::RecordBatch.new(schema, n_rows, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        record << columns[j].data.indices[i]
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
diff --git a/ruby/red-arrow/benchmark/raw-records/int64.yml b/ruby/red-arrow/benchmark/raw-records/int64.yml
deleted file mode 100644
index bd03ab9..0000000
--- a/ruby/red-arrow/benchmark/raw-records/int64.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = :int64
-
-  fields = {}
-  arrays = {}
-  n_columns.times do |i|
-    column_name = "column_#{i}"
-    fields[column_name] = type
-    arrays[column_name] = n_rows.times.map do
-      Faker::Number.number(digits: 18).to_i
-    end
-  end
-  record_batch = Arrow::RecordBatch.new(fields, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        record << columns[j][i]
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
diff --git a/ruby/red-arrow/benchmark/raw-records/list.yml b/ruby/red-arrow/benchmark/raw-records/list.yml
deleted file mode 100644
index b9a5267..0000000
--- a/ruby/red-arrow/benchmark/raw-records/list.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = Arrow::ListDataType.new(name: "values", type: :double)
-
-  fields = {}
-  arrays = {}
-  n_columns.times do |i|
-    column_name = "column_#{i}"
-    fields[column_name] = type
-    arrays[column_name] = n_rows.times.map do
-      n_elements = Faker::Number.within(range: 1 ... 100)
-      n_elements.times.map do
-        Faker::Number.normal(mean: 0, standard_deviation: 1e+6)
-      end
-    end
-  end
-  record_batch = Arrow::RecordBatch.new(fields, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        record << columns[j][i]
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
diff --git a/ruby/red-arrow/benchmark/raw-records/string.yml b/ruby/red-arrow/benchmark/raw-records/string.yml
deleted file mode 100644
index 2854a37..0000000
--- a/ruby/red-arrow/benchmark/raw-records/string.yml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = :string
-
-  fields = {}
-  arrays = {}
-  n_columns.times do |i|
-    column_name = "column_#{i}"
-    fields[column_name] = type
-    arrays[column_name] = n_rows.times.map { Faker::Name.name }
-  end
-  record_batch = Arrow::RecordBatch.new(fields, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        record << columns[j][i]
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
diff --git a/ruby/red-arrow/benchmark/raw-records/timestamp.yml b/ruby/red-arrow/benchmark/raw-records/timestamp.yml
deleted file mode 100644
index 9b65b79..0000000
--- a/ruby/red-arrow/benchmark/raw-records/timestamp.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_rows = 1000
-  n_columns = 10
-  type = Arrow::TimestampDataType.new(:micro)
-  base_timestamp = Time.at(Faker::Number.within(range: 0 ... 1_000_000_000))
-  thirty_days_in_sec = 30*24*3600
-  timestamp_range = {
-    from: base_timestamp - thirty_days_in_sec,
-    to: base_timestamp + thirty_days_in_sec,
-  }
-
-  fields = {}
-  arrays = {}
-  n_columns.times do |i|
-    column_name = "column_#{i}"
-    fields[column_name] = type
-    arrays[column_name] = n_rows.times.map do
-      sec = Faker::Time.between(timestamp_range).to_i
-      micro = Faker::Number.within(range: 0 ... 1_000_000)
-      sec * 1_000_000 + micro
-    end
-  end
-  record_batch = Arrow::RecordBatch.new(fields, arrays)
-
-  def pure_ruby_raw_records(record_batch)
-    n_rows = record_batch.n_rows
-    n_columns = record_batch.n_columns
-    columns = record_batch.columns
-    records = []
-    i = 0
-    while i < n_rows
-      record = []
-      j = 0
-      while j < n_columns
-        record << columns[j][i]
-        j += 1
-      end
-      records << record
-      i += 1
-    end
-    records
-  end
-benchmark:
-  pure_ruby: |-
-    pure_ruby_raw_records(record_batch)
-  raw_records: |-
-    record_batch.raw_records
diff --git a/ruby/red-arrow/benchmark/values/boolean.yml b/ruby/red-arrow/benchmark/values/boolean.yml
deleted file mode 100644
index 45abff5..0000000
--- a/ruby/red-arrow/benchmark/values/boolean.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-  values = n_values.times.map { Faker::Boolean.boolean }
-  array = Arrow::BooleanArray.new(values)
-benchmark:
-  pure_ruby: |-
-    array.collect.to_a
-  values: |-
-    array.values
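The values benchmarks have the same shape: Array#values converts a whole Arrow array into a Ruby Array natively, while collect.to_a goes element by element through Enumerable. A minimal sketch; null entries presumably come back as nil, matching the IsNull handling in the converters removed further below:

    require "arrow"

    array = Arrow::BooleanArray.new([true, false, nil])
    array.values        # => [true, false, nil]
    array.collect.to_a  # same result, built one element at a time in Ruby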
diff --git a/ruby/red-arrow/benchmark/values/decimal128.yml b/ruby/red-arrow/benchmark/values/decimal128.yml
deleted file mode 100644
index 4a2a5bf..0000000
--- a/ruby/red-arrow/benchmark/values/decimal128.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-  type = Arrow::Decimal128DataType.new(10, 5)
-  values = n_values.times.map { Faker::Number.decimal(l_digits: 10, r_digits: 5) }
-  array = Arrow::Decimal128Array.new(type, values)
-benchmark:
-  pure_ruby: |-
-    array.collect.to_a
-  values: |-
-    array.values
diff --git a/ruby/red-arrow/benchmark/values/dictionary.yml b/ruby/red-arrow/benchmark/values/dictionary.yml
deleted file mode 100644
index 5b4f20d..0000000
--- a/ruby/red-arrow/benchmark/values/dictionary.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-  type = Arrow::DictionaryDataType.new(:int8, :string, true)
-
-  dictionary = Arrow::StringArray.new(
-    100.times.map { Faker::Book.genre }.uniq.sort
-  )
-  indices = Arrow::Int8Array.new(
-    n_values.times.map {
-      Faker::Number.within(range: 0 ... dictionary.length)
-    }
-  )
-  array = Arrow::DictionaryArray.new(type, indices, dictionary)
-benchmark:
-  pure_ruby: |-
-    array.length.times.collect {|i| array.indices[i]}
-  values: |-
-    array.values
diff --git a/ruby/red-arrow/benchmark/values/int64.yml b/ruby/red-arrow/benchmark/values/int64.yml
deleted file mode 100644
index d9e8926..0000000
--- a/ruby/red-arrow/benchmark/values/int64.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-  values = n_values.times.map { Faker::Number.number(digits: 18).to_i }
-  array = Arrow::Int64Array.new(values)
-benchmark:
-  pure_ruby: |-
-    array.collect.to_a
-  values: |-
-    array.values
diff --git a/ruby/red-arrow/benchmark/values/list.yml b/ruby/red-arrow/benchmark/values/list.yml
deleted file mode 100644
index 2764c1a..0000000
--- a/ruby/red-arrow/benchmark/values/list.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-  type = Arrow::ListDataType.new(name: "values", type: :double)
-
-  values = n_values.times.map do
-    n_elements = Faker::Number.within(range: 1 ... 100)
-    n_elements.times.map do
-      Faker::Number.normal(mean: 0, standard_deviation: 1e+6)
-    end
-  end
-  array = Arrow::ListArray.new(type, values)
-benchmark:
-  pure_ruby: |-
-    array.collect.to_a
-  values: |-
-    array.values
diff --git a/ruby/red-arrow/benchmark/values/string.yml b/ruby/red-arrow/benchmark/values/string.yml
deleted file mode 100644
index 8a40dea..0000000
--- a/ruby/red-arrow/benchmark/values/string.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-
-  values = n_values.times.map { Faker::Name.name }
-  array = Arrow::StringArray.new(values)
-benchmark:
-  pure_ruby: |-
-    array.collect.to_a
-  values: |-
-    array.values
diff --git a/ruby/red-arrow/benchmark/values/timestamp.yml b/ruby/red-arrow/benchmark/values/timestamp.yml
deleted file mode 100644
index 4af46d1..0000000
--- a/ruby/red-arrow/benchmark/values/timestamp.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-contexts:
-  - name: master
-    prelude: |
-      $LOAD_PATH.unshift(File.expand_path("ext/arrow"))
-      $LOAD_PATH.unshift(File.expand_path("lib"))
-prelude: |-
-  require "arrow"
-  require "faker"
-
-  state = ENV.fetch("FAKER_RANDOM_SEED", 17).to_i
-  Faker::Config.random = Random.new(state)
-
-  n_values = 1000
-  type = Arrow::TimestampDataType.new(:micro)
-  base_timestamp = Time.at(Faker::Number.within(range: 0 ... 1_000_000_000))
-  thirty_days_in_sec = 30*24*3600
-  timestamp_range = {
-    from: base_timestamp - thirty_days_in_sec,
-    to: base_timestamp + thirty_days_in_sec,
-  }
-
-  values = n_values.times.map do
-    sec = Faker::Time.between(timestamp_range).to_i
-    micro = Faker::Number.within(range: 0 ... 1_000_000)
-    sec * 1_000_000 + micro
-  end
-  array = Arrow::TimestampArray.new(type, values)
-benchmark:
-  pure_ruby: |-
-    array.collect.to_a
-  values: |-
-    array.values
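The prelude above stores each timestamp as whole microseconds since the Unix epoch: seconds times 1_000_000 plus the sub-second microsecond part. A worked example of that encoding with a fixed UTC instant:

    require "arrow"

    time = Time.utc(2001, 2, 3, 4, 5, 6, 789_000)
    micros = time.to_i * 1_000_000 + time.usec
    # => 981_173_106 * 1_000_000 + 789_000 = 981_173_106_789_000

    type = Arrow::TimestampDataType.new(:micro)
    array = Arrow::TimestampArray.new(type, [micros])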
diff --git a/ruby/red-arrow/doc/text/development.md b/ruby/red-arrow/doc/text/development.md
deleted file mode 100644
index cc86de3..0000000
--- a/ruby/red-arrow/doc/text/development.md
+++ /dev/null
@@ -1,34 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Development
-
-## Naming convention
-
-### Reader and Writer
-
-Reader and Writer require an opened IO stream.
-
-### Loader and Saver
-
-Loader and Saver require a path. They are convenience classes.
-
-Loader opens the path and reads data with a Reader.
-
-Saver opens the path and writes data with a Writer.
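A minimal sketch of this convention, assuming /tmp/file.arrow already exists: Table.load and Table#save are the Loader/Saver entry points, while a Reader takes the stream you opened yourself (compare the example scripts removed below):

    require "arrow"

    # Loader and Saver take a path and manage the stream themselves.
    table = Arrow::Table.load("/tmp/file.arrow")
    table.save("/tmp/copy.arrow")

    # Reader and Writer take an already opened IO stream.
    Arrow::MemoryMappedInputStream.open("/tmp/file.arrow") do |input|
      reader = Arrow::RecordBatchFileReader.new(input)
      reader.each do |record_batch|
        p record_batch.n_rows
      end
    end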
diff --git a/ruby/red-arrow/example/read-file.rb b/ruby/red-arrow/example/read-file.rb
deleted file mode 100755
index 9a99d33..0000000
--- a/ruby/red-arrow/example/read-file.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env ruby
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow"
-
-Arrow::MemoryMappedInputStream.open("/tmp/file.arrow") do |input|
-  reader = Arrow::RecordBatchFileReader.new(input)
-  fields = reader.schema.fields
-  reader.each_with_index do |record_batch, i|
-    puts("=" * 40)
-    puts("record-batch[#{i}]:")
-    fields.each do |field|
-      field_name = field.name
-      values = record_batch.collect do |record|
-        record[field_name]
-      end
-      puts("  #{field_name}: #{values.inspect}")
-    end
-  end
-end
diff --git a/ruby/red-arrow/example/read-stream.rb b/ruby/red-arrow/example/read-stream.rb
deleted file mode 100755
index c719712..0000000
--- a/ruby/red-arrow/example/read-stream.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env ruby
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow"
-
-Arrow::MemoryMappedInputStream.open("/tmp/stream.arrow") do |input|
-  reader = Arrow::RecordBatchStreamReader.new(input)
-  fields = reader.schema.fields
-  reader.each_with_index do |record_batch, i|
-    puts("=" * 40)
-    puts("record-batch[#{i}]:")
-    fields.each do |field|
-      field_name = field.name
-      values = record_batch.collect do |record|
-        record[field_name]
-      end
-      puts("  #{field_name}: #{values.inspect}")
-    end
-  end
-end
diff --git a/ruby/red-arrow/example/write-file.rb b/ruby/red-arrow/example/write-file.rb
deleted file mode 100755
index c55ab2e..0000000
--- a/ruby/red-arrow/example/write-file.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env ruby
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow"
-
-fields = [
-  Arrow::Field.new("uint8",  :uint8),
-  Arrow::Field.new("uint16", :uint16),
-  Arrow::Field.new("uint32", :uint32),
-  Arrow::Field.new("uint64", :uint64),
-  Arrow::Field.new("int8",   :int8),
-  Arrow::Field.new("int16",  :int16),
-  Arrow::Field.new("int32",  :int32),
-  Arrow::Field.new("int64",  :int64),
-  Arrow::Field.new("float",  :float),
-  Arrow::Field.new("double", :double),
-]
-schema = Arrow::Schema.new(fields)
-
-Arrow::FileOutputStream.open("/tmp/file.arrow", false) do |output|
-  Arrow::RecordBatchFileWriter.open(output, schema) do |writer|
-    uints = [1, 2, 4, 8]
-    ints = [1, -2, 4, -8]
-    floats = [1.1, -2.2, 4.4, -8.8]
-    columns = [
-      Arrow::UInt8Array.new(uints),
-      Arrow::UInt16Array.new(uints),
-      Arrow::UInt32Array.new(uints),
-      Arrow::UInt64Array.new(uints),
-      Arrow::Int8Array.new(ints),
-      Arrow::Int16Array.new(ints),
-      Arrow::Int32Array.new(ints),
-      Arrow::Int64Array.new(ints),
-      Arrow::FloatArray.new(floats),
-      Arrow::DoubleArray.new(floats),
-    ]
-
-    record_batch = Arrow::RecordBatch.new(schema, 4, columns)
-    writer.write_record_batch(record_batch)
-
-    sliced_columns = columns.collect do |column|
-      column.slice(1, 3)
-    end
-    record_batch = Arrow::RecordBatch.new(schema, 3, sliced_columns)
-    writer.write_record_batch(record_batch)
-  end
-end
diff --git a/ruby/red-arrow/example/write-stream.rb b/ruby/red-arrow/example/write-stream.rb
deleted file mode 100755
index fde4862..0000000
--- a/ruby/red-arrow/example/write-stream.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env ruby
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow"
-
-fields = [
-  Arrow::Field.new("uint8",  :uint8),
-  Arrow::Field.new("uint16", :uint16),
-  Arrow::Field.new("uint32", :uint32),
-  Arrow::Field.new("uint64", :uint64),
-  Arrow::Field.new("int8",   :int8),
-  Arrow::Field.new("int16",  :int16),
-  Arrow::Field.new("int32",  :int32),
-  Arrow::Field.new("int64",  :int64),
-  Arrow::Field.new("float",  :float),
-  Arrow::Field.new("double", :double),
-]
-schema = Arrow::Schema.new(fields)
-
-Arrow::FileOutputStream.open("/tmp/stream.arrow", false) do |output|
-  Arrow::RecordBatchStreamWriter.open(output, schema) do |writer|
-    uints = [1, 2, 4, 8]
-    ints = [1, -2, 4, -8]
-    floats = [1.1, -2.2, 4.4, -8.8]
-    columns = [
-      Arrow::UInt8Array.new(uints),
-      Arrow::UInt16Array.new(uints),
-      Arrow::UInt32Array.new(uints),
-      Arrow::UInt64Array.new(uints),
-      Arrow::Int8Array.new(ints),
-      Arrow::Int16Array.new(ints),
-      Arrow::Int32Array.new(ints),
-      Arrow::Int64Array.new(ints),
-      Arrow::FloatArray.new(floats),
-      Arrow::DoubleArray.new(floats),
-    ]
-
-    record_batch = Arrow::RecordBatch.new(schema, 4, columns)
-    writer.write_record_batch(record_batch)
-
-    sliced_columns = columns.collect do |column|
-      column.slice(1, 3)
-    end
-    record_batch = Arrow::RecordBatch.new(schema, 3, sliced_columns)
-    writer.write_record_batch(record_batch)
-  end
-end
diff --git a/ruby/red-arrow/ext/arrow/arrow.cpp b/ruby/red-arrow/ext/arrow/arrow.cpp
deleted file mode 100644
index 6226ba0..0000000
--- a/ruby/red-arrow/ext/arrow/arrow.cpp
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include "red-arrow.hpp"
-
-#include <ruby.hpp>
-
-namespace red_arrow {
-  VALUE cDate;
-
-  VALUE cArrowTime;
-
-  VALUE ArrowTimeUnitSECOND;
-  VALUE ArrowTimeUnitMILLI;
-  VALUE ArrowTimeUnitMICRO;
-  VALUE ArrowTimeUnitNANO;
-
-  ID id_BigDecimal;
-  ID id_jd;
-  ID id_new;
-  ID id_to_datetime;
-}
-
-extern "C" void Init_arrow() {
-  auto mArrow = rb_const_get_at(rb_cObject, rb_intern("Arrow"));
-
-  auto cArrowArray = rb_const_get_at(mArrow, rb_intern("Array"));
-  rb_define_method(cArrowArray, "values",
-                   reinterpret_cast<rb::RawMethod>(red_arrow::array_values),
-                   0);
-
-  auto cArrowChunkedArray = rb_const_get_at(mArrow, rb_intern("ChunkedArray"));
-  rb_define_method(cArrowChunkedArray, "values",
-                   reinterpret_cast<rb::RawMethod>(red_arrow::chunked_array_values),
-                   0);
-
-  auto cArrowRecordBatch = rb_const_get_at(mArrow, rb_intern("RecordBatch"));
-  rb_define_method(cArrowRecordBatch, "raw_records",
-                   reinterpret_cast<rb::RawMethod>(red_arrow::record_batch_raw_records),
-                   0);
-
-  auto cArrowTable = rb_const_get_at(mArrow, rb_intern("Table"));
-  rb_define_method(cArrowTable, "raw_records",
-                   reinterpret_cast<rb::RawMethod>(red_arrow::table_raw_records),
-                   0);
-
-  red_arrow::cDate = rb_const_get(rb_cObject, rb_intern("Date"));
-
-  red_arrow::cArrowTime = rb_const_get_at(mArrow, rb_intern("Time"));
-
-  auto cArrowTimeUnit = rb_const_get_at(mArrow, rb_intern("TimeUnit"));
-  red_arrow::ArrowTimeUnitSECOND =
-    rb_const_get_at(cArrowTimeUnit, rb_intern("SECOND"));
-  red_arrow::ArrowTimeUnitMILLI =
-    rb_const_get_at(cArrowTimeUnit, rb_intern("MILLI"));
-  red_arrow::ArrowTimeUnitMICRO =
-    rb_const_get_at(cArrowTimeUnit, rb_intern("MICRO"));
-  red_arrow::ArrowTimeUnitNANO =
-    rb_const_get_at(cArrowTimeUnit, rb_intern("NANO"));
-
-  red_arrow::id_BigDecimal = rb_intern("BigDecimal");
-  red_arrow::id_jd = rb_intern("jd");
-  red_arrow::id_new = rb_intern("new");
-  red_arrow::id_to_datetime = rb_intern("to_datetime");
-}
diff --git a/ruby/red-arrow/ext/arrow/converters.cpp b/ruby/red-arrow/ext/arrow/converters.cpp
deleted file mode 100644
index 12e8324..0000000
--- a/ruby/red-arrow/ext/arrow/converters.cpp
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include "converters.hpp"
-
-namespace red_arrow {
-  VALUE ArrayValueConverter::convert(const arrow::ListArray& array,
-                                     const int64_t i) {
-    return list_array_value_converter_->convert(array, i);
-  }
-
-  VALUE ArrayValueConverter::convert(const arrow::StructArray& array,
-                                     const int64_t i) {
-    return struct_array_value_converter_->convert(array, i);
-  }
-
-  VALUE ArrayValueConverter::convert(const arrow::UnionArray& array,
-                                     const int64_t i) {
-    return union_array_value_converter_->convert(array, i);
-  }
-
-  VALUE ArrayValueConverter::convert(const arrow::DictionaryArray& array,
-                                     const int64_t i) {
-    return dictionary_array_value_converter_->convert(array, i);
-  }
-}
diff --git a/ruby/red-arrow/ext/arrow/converters.hpp b/ruby/red-arrow/ext/arrow/converters.hpp
deleted file mode 100644
index 82d4b55..0000000
--- a/ruby/red-arrow/ext/arrow/converters.hpp
+++ /dev/null
@@ -1,669 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include "red-arrow.hpp"
-
-#include <ruby.hpp>
-#include <ruby/encoding.h>
-
-#include <arrow-glib/error.hpp>
-
-#include <arrow/util/logging.h>
-
-namespace red_arrow {
-  class ListArrayValueConverter;
-  class StructArrayValueConverter;
-  class UnionArrayValueConverter;
-  class DictionaryArrayValueConverter;
-
-  class ArrayValueConverter {
-  public:
-    ArrayValueConverter()
-      : decimal_buffer_(),
-        list_array_value_converter_(nullptr),
-        struct_array_value_converter_(nullptr),
-        union_array_value_converter_(nullptr),
-        dictionary_array_value_converter_(nullptr) {
-    }
-
-    inline void set_sub_value_converters(ListArrayValueConverter* list_array_value_converter,
-                                         StructArrayValueConverter* struct_array_value_converter,
-                                         UnionArrayValueConverter* union_array_value_converter,
-                                         DictionaryArrayValueConverter* dictionary_array_value_converter) {
-      list_array_value_converter_ = list_array_value_converter;
-      struct_array_value_converter_ = struct_array_value_converter;
-      union_array_value_converter_ = union_array_value_converter;
-      dictionary_array_value_converter_ = dictionary_array_value_converter;
-    }
-
-    inline VALUE convert(const arrow::NullArray& array,
-                         const int64_t i) {
-      return Qnil;
-    }
-
-    inline VALUE convert(const arrow::BooleanArray& array,
-                         const int64_t i) {
-      return array.Value(i) ? Qtrue : Qfalse;
-    }
-
-    inline VALUE convert(const arrow::Int8Array& array,
-                         const int64_t i) {
-      return INT2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::Int16Array& array,
-                         const int64_t i) {
-      return INT2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::Int32Array& array,
-                         const int64_t i) {
-      return INT2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::Int64Array& array,
-                         const int64_t i) {
-      return LL2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::UInt8Array& array,
-                         const int64_t i) {
-      return UINT2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::UInt16Array& array,
-                         const int64_t i) {
-      return UINT2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::UInt32Array& array,
-                         const int64_t i) {
-      return UINT2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::UInt64Array& array,
-                         const int64_t i) {
-      return ULL2NUM(array.Value(i));
-    }
-
-    // TODO
-    // inline VALUE convert(const arrow::HalfFloatArray& array,
-    //                      const int64_t i) {
-    // }
-
-    inline VALUE convert(const arrow::FloatArray& array,
-                         const int64_t i) {
-      return DBL2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::DoubleArray& array,
-                         const int64_t i) {
-      return DBL2NUM(array.Value(i));
-    }
-
-    inline VALUE convert(const arrow::BinaryArray& array,
-                         const int64_t i) {
-      int32_t length;
-      const auto value = array.GetValue(i, &length);
-      // TODO: encoding support
-      return rb_enc_str_new(reinterpret_cast<const char*>(value),
-                            length,
-                            rb_ascii8bit_encoding());
-    }
-
-    inline VALUE convert(const arrow::StringArray& array,
-                         const int64_t i) {
-      int32_t length;
-      const auto value = array.GetValue(i, &length);
-      return rb_utf8_str_new(reinterpret_cast<const char*>(value),
-                             length);
-    }
-
-    inline VALUE convert(const arrow::FixedSizeBinaryArray& array,
-                         const int64_t i) {
-      return rb_enc_str_new(reinterpret_cast<const char*>(array.Value(i)),
-                            array.byte_width(),
-                            rb_ascii8bit_encoding());
-    }
-
-    constexpr static int32_t JULIAN_DATE_UNIX_EPOCH = 2440588;
-    inline VALUE convert(const arrow::Date32Array& array,
-                         const int64_t i) {
-      const auto value = array.Value(i);
-      const auto days_in_julian = value + JULIAN_DATE_UNIX_EPOCH;
-      return rb_funcall(cDate, id_jd, 1, LONG2NUM(days_in_julian));
-    }
-
-    inline VALUE convert(const arrow::Date64Array& array,
-                         const int64_t i) {
-      const auto value = array.Value(i);
-      auto msec = LL2NUM(value);
-      auto sec = rb_rational_new(msec, INT2NUM(1000));
-      auto time_value = rb_time_num_new(sec, Qnil);
-      return rb_funcall(time_value, id_to_datetime, 0, 0);
-    }
-
-    inline VALUE convert(const arrow::Time32Array& array,
-                         const int64_t i) {
-      const auto type =
-        arrow::internal::checked_cast<const arrow::Time32Type*>(array.type().get());
-      const auto value = array.Value(i);
-      return rb_funcall(red_arrow::cArrowTime,
-                        id_new,
-                        2,
-                        time_unit_to_enum(type->unit()),
-                        INT2NUM(value));
-    }
-
-    inline VALUE convert(const arrow::Time64Array& array,
-                         const int64_t i) {
-      const auto type =
-        arrow::internal::checked_cast<const arrow::Time64Type*>(array.type().get());
-      const auto value = array.Value(i);
-      return rb_funcall(red_arrow::cArrowTime,
-                        id_new,
-                        2,
-                        time_unit_to_enum(type->unit()),
-                        LL2NUM(value));
-    }
-
-    inline VALUE convert(const arrow::TimestampArray& array,
-                         const int64_t i) {
-      const auto type =
-        arrow::internal::checked_cast<const arrow::TimestampType*>(array.type().get());
-      auto scale = time_unit_to_scale(type->unit());
-      auto value = array.Value(i);
-      auto sec = rb_rational_new(LL2NUM(value), scale);
-      return rb_time_num_new(sec, Qnil);
-    }
-
-    // TODO
-    // inline VALUE convert(const arrow::IntervalArray& array,
-    //                      const int64_t i) {
-    // };
-
-    VALUE convert(const arrow::ListArray& array,
-                  const int64_t i);
-
-    VALUE convert(const arrow::StructArray& array,
-                  const int64_t i);
-
-    VALUE convert(const arrow::UnionArray& array,
-                  const int64_t i);
-
-    VALUE convert(const arrow::DictionaryArray& array,
-                  const int64_t i);
-
-    inline VALUE convert(const arrow::Decimal128Array& array,
-                         const int64_t i) {
-      return convert_decimal(std::move(array.FormatValue(i)));
-    }
-
-    inline VALUE convert(const arrow::Decimal256Array& array,
-                         const int64_t i) {
-      return convert_decimal(std::move(array.FormatValue(i)));
-    }
-
-  private:
-    inline VALUE convert_decimal(std::string&& value) {
-      decimal_buffer_ = value;
-      return rb_funcall(rb_cObject,
-                        id_BigDecimal,
-                        1,
-                        rb_enc_str_new(decimal_buffer_.data(),
-                                       decimal_buffer_.length(),
-                                       rb_ascii8bit_encoding()));
-    }
-
-    std::string decimal_buffer_;
-    ListArrayValueConverter* list_array_value_converter_;
-    StructArrayValueConverter* struct_array_value_converter_;
-    UnionArrayValueConverter* union_array_value_converter_;
-    DictionaryArrayValueConverter* dictionary_array_value_converter_;
-  };
-
-  class ListArrayValueConverter : public arrow::ArrayVisitor {
-  public:
-    explicit ListArrayValueConverter(ArrayValueConverter* converter)
-      : array_value_converter_(converter),
-        offset_(0),
-        length_(0),
-        result_(Qnil) {}
-
-    VALUE convert(const arrow::ListArray& array, const int64_t index) {
-      auto values = array.values().get();
-      auto offset_keep = offset_;
-      auto length_keep = length_;
-      offset_ = array.value_offset(index);
-      length_ = array.value_length(index);
-      auto result_keep = result_;
-      result_ = rb_ary_new_capa(length_);
-      check_status(values->Accept(this),
-                   "[raw-records][list-array]");
-      offset_ = offset_keep;
-      length_ = length_keep;
-      auto result_return = result_;
-      result_ = result_keep;
-      return result_return;
-    }
-
-#define VISIT(TYPE)                                                     \
-    arrow::Status Visit(const arrow::TYPE ## Array& array) override {   \
-      return visit_value(array);                                        \
-    }
-
-    VISIT(Null)
-    VISIT(Boolean)
-    VISIT(Int8)
-    VISIT(Int16)
-    VISIT(Int32)
-    VISIT(Int64)
-    VISIT(UInt8)
-    VISIT(UInt16)
-    VISIT(UInt32)
-    VISIT(UInt64)
-    // TODO
-    // VISIT(HalfFloat)
-    VISIT(Float)
-    VISIT(Double)
-    VISIT(Binary)
-    VISIT(String)
-    VISIT(FixedSizeBinary)
-    VISIT(Date32)
-    VISIT(Date64)
-    VISIT(Time32)
-    VISIT(Time64)
-    VISIT(Timestamp)
-    // TODO
-    // VISIT(Interval)
-    VISIT(List)
-    VISIT(Struct)
-    VISIT(SparseUnion)
-    VISIT(DenseUnion)
-    VISIT(Dictionary)
-    VISIT(Decimal128)
-    VISIT(Decimal256)
-    // TODO
-    // VISIT(Extension)
-
-#undef VISIT
-
-  private:
-    template <typename ArrayType>
-    inline VALUE convert_value(const ArrayType& array,
-                               const int64_t i) {
-      return array_value_converter_->convert(array, i);
-    }
-
-    template <typename ArrayType>
-    arrow::Status visit_value(const ArrayType& array) {
-      if (array.null_count() > 0) {
-        for (int64_t i = 0; i < length_; ++i) {
-          auto value = Qnil;
-          if (!array.IsNull(i + offset_)) {
-            value = convert_value(array, i + offset_);
-          }
-          rb_ary_push(result_, value);
-        }
-      } else {
-        for (int64_t i = 0; i < length_; ++i) {
-          rb_ary_push(result_, convert_value(array, i + offset_));
-        }
-      }
-      return arrow::Status::OK();
-    }
-
-    ArrayValueConverter* array_value_converter_;
-    int32_t offset_;
-    int32_t length_;
-    VALUE result_;
-  };
-
-  class StructArrayValueConverter : public arrow::ArrayVisitor {
-  public:
-    explicit StructArrayValueConverter(ArrayValueConverter* converter)
-      : array_value_converter_(converter),
-        key_(Qnil),
-        index_(0),
-        result_(Qnil) {}
-
-    VALUE convert(const arrow::StructArray& array,
-                  const int64_t index) {
-      auto index_keep = index_;
-      auto result_keep = result_;
-      index_ = index;
-      result_ = rb_hash_new();
-      const auto struct_type = array.struct_type();
-      const auto n = struct_type->num_fields();
-      for (int i = 0; i < n; ++i) {
-        const auto field_type = struct_type->field(i).get();
-        const auto& field_name = field_type->name();
-        auto key_keep = key_;
-        key_ = rb_utf8_str_new(field_name.data(), field_name.length());
-        const auto field_array = array.field(i).get();
-        check_status(field_array->Accept(this),
-                     "[raw-records][struct-array]");
-        key_ = key_keep;
-      }
-      auto result_return = result_;
-      result_ = result_keep;
-      index_ = index_keep;
-      return result_return;
-    }
-
-#define VISIT(TYPE)                                                     \
-    arrow::Status Visit(const arrow::TYPE ## Array& array) override {   \
-      fill_field(array);                                                \
-      return arrow::Status::OK();                                       \
-    }
-
-    VISIT(Null)
-    VISIT(Boolean)
-    VISIT(Int8)
-    VISIT(Int16)
-    VISIT(Int32)
-    VISIT(Int64)
-    VISIT(UInt8)
-    VISIT(UInt16)
-    VISIT(UInt32)
-    VISIT(UInt64)
-    // TODO
-    // VISIT(HalfFloat)
-    VISIT(Float)
-    VISIT(Double)
-    VISIT(Binary)
-    VISIT(String)
-    VISIT(FixedSizeBinary)
-    VISIT(Date32)
-    VISIT(Date64)
-    VISIT(Time32)
-    VISIT(Time64)
-    VISIT(Timestamp)
-    // TODO
-    // VISIT(Interval)
-    VISIT(List)
-    VISIT(Struct)
-    VISIT(SparseUnion)
-    VISIT(DenseUnion)
-    VISIT(Dictionary)
-    VISIT(Decimal128)
-    VISIT(Decimal256)
-    // TODO
-    // VISIT(Extension)
-
-#undef VISIT
-
-  private:
-    template <typename ArrayType>
-    inline VALUE convert_value(const ArrayType& array,
-                               const int64_t i) {
-      return array_value_converter_->convert(array, i);
-    }
-
-    template <typename ArrayType>
-    void fill_field(const ArrayType& array) {
-      if (array.IsNull(index_)) {
-        rb_hash_aset(result_, key_, Qnil);
-      } else {
-        rb_hash_aset(result_, key_, convert_value(array, index_));
-      }
-    }
-
-    ArrayValueConverter* array_value_converter_;
-    VALUE key_;
-    int64_t index_;
-    VALUE result_;
-  };
-
-  class UnionArrayValueConverter : public arrow::ArrayVisitor {
-  public:
-    explicit UnionArrayValueConverter(ArrayValueConverter* converter)
-      : array_value_converter_(converter),
-        index_(0),
-        result_(Qnil) {}
-
-    VALUE convert(const arrow::UnionArray& array,
-                  const int64_t index) {
-      const auto index_keep = index_;
-      const auto result_keep = result_;
-      index_ = index;
-      switch (array.mode()) {
-      case arrow::UnionMode::SPARSE:
-        convert_sparse(static_cast<const arrow::SparseUnionArray&>(array));
-        break;
-      case arrow::UnionMode::DENSE:
-        convert_dense(static_cast<const arrow::DenseUnionArray&>(array));
-        break;
-      default:
-        rb_raise(rb_eArgError, "Invalid union mode");
-        break;
-      }
-      auto result_return = result_;
-      index_ = index_keep;
-      result_ = result_keep;
-      return result_return;
-    }
-
-#define VISIT(TYPE)                                                     \
-    arrow::Status Visit(const arrow::TYPE ## Array& array) override {   \
-      convert_value(array);                                             \
-      return arrow::Status::OK();                                       \
-    }
-
-    VISIT(Null)
-    VISIT(Boolean)
-    VISIT(Int8)
-    VISIT(Int16)
-    VISIT(Int32)
-    VISIT(Int64)
-    VISIT(UInt8)
-    VISIT(UInt16)
-    VISIT(UInt32)
-    VISIT(UInt64)
-    // TODO
-    // VISIT(HalfFloat)
-    VISIT(Float)
-    VISIT(Double)
-    VISIT(Binary)
-    VISIT(String)
-    VISIT(FixedSizeBinary)
-    VISIT(Date32)
-    VISIT(Date64)
-    VISIT(Time32)
-    VISIT(Time64)
-    VISIT(Timestamp)
-    // TODO
-    // VISIT(Interval)
-    VISIT(List)
-    VISIT(Struct)
-    VISIT(SparseUnion)
-    VISIT(DenseUnion)
-    VISIT(Dictionary)
-    VISIT(Decimal128)
-    VISIT(Decimal256)
-    // TODO
-    // VISIT(Extension)
-
-#undef VISIT
-
-  private:
-    template <typename ArrayType>
-    inline void convert_value(const ArrayType& array) {
-      auto result = rb_hash_new();
-      if (array.IsNull(index_)) {
-        rb_hash_aset(result, field_name_, Qnil);
-      } else {
-        rb_hash_aset(result,
-                     field_name_,
-                     array_value_converter_->convert(array, index_));
-      }
-      result_ = result;
-    }
-
-    uint8_t compute_field_index(const arrow::UnionArray& array,
-                                arrow::UnionType* type,
-                                const char* tag) {
-      const auto type_code = array.raw_type_codes()[index_];
-      if (type_code >= 0 && type_code <= arrow::UnionType::kMaxTypeCode) {
-        const auto field_id = type->child_ids()[type_code];
-        if (field_id >= 0) {
-          return field_id;
-        }
-      }
-      check_status(arrow::Status::Invalid("Unknown type ID: ", type_code),
-                   tag);
-      return 0;
-    }
-
-    void convert_sparse(const arrow::SparseUnionArray& array) {
-      const auto type =
-        std::static_pointer_cast<arrow::UnionType>(array.type()).get();
-      const auto tag = "[raw-records][union-sparse-array]";
-      const auto index = compute_field_index(array, type, tag);
-      const auto field = type->field(index).get();
-      const auto& field_name = field->name();
-      const auto field_name_keep = field_name_;
-      field_name_ = rb_utf8_str_new(field_name.data(), field_name.length());
-      const auto field_array = array.field(index).get();
-      check_status(field_array->Accept(this), tag);
-      field_name_ = field_name_keep;
-    }
-
-    void convert_dense(const arrow::DenseUnionArray& array) {
-      const auto type =
-        std::static_pointer_cast<arrow::UnionType>(array.type()).get();
-      const auto tag = "[raw-records][union-dense-array]";
-      const auto index = compute_field_index(array, type, tag);
-      const auto field = type->field(index).get();
-      const auto& field_name = field->name();
-      const auto field_name_keep = field_name_;
-      field_name_ = rb_utf8_str_new(field_name.data(), field_name.length());
-      const auto field_array = array.field(index);
-      const auto index_keep = index_;
-      index_ = array.value_offset(index_);
-      check_status(field_array->Accept(this), tag);
-      index_ = index_keep;
-      field_name_ = field_name_keep;
-    }
-
-    ArrayValueConverter* array_value_converter_;
-    int64_t index_;
-    VALUE field_name_;
-    VALUE result_;
-  };
-
-  class DictionaryArrayValueConverter : public arrow::ArrayVisitor {
-  public:
-    explicit DictionaryArrayValueConverter(ArrayValueConverter* converter)
-      : array_value_converter_(converter),
-        value_index_(0),
-        result_(Qnil) {
-    }
-
-    VALUE convert(const arrow::DictionaryArray& array,
-                  const int64_t index) {
-      value_index_ = array.GetValueIndex(index);
-      auto dictionary = array.dictionary().get();
-      check_status(dictionary->Accept(this),
-                   "[raw-records][dictionary-array]");
-      return result_;
-    }
-
-#define VISIT(TYPE)                                                     \
-    arrow::Status Visit(const arrow::TYPE ## Array& array) override {   \
-      result_ = convert_value(array, value_index_);                     \
-      return arrow::Status::OK();                                       \
-    }
-
-    VISIT(Null)
-    VISIT(Boolean)
-    VISIT(Int8)
-    VISIT(Int16)
-    VISIT(Int32)
-    VISIT(Int64)
-    VISIT(UInt8)
-    VISIT(UInt16)
-    VISIT(UInt32)
-    VISIT(UInt64)
-    // TODO
-    // VISIT(HalfFloat)
-    VISIT(Float)
-    VISIT(Double)
-    VISIT(Binary)
-    VISIT(String)
-    VISIT(FixedSizeBinary)
-    VISIT(Date32)
-    VISIT(Date64)
-    VISIT(Time32)
-    VISIT(Time64)
-    VISIT(Timestamp)
-    // TODO
-    // VISIT(Interval)
-    VISIT(List)
-    VISIT(Struct)
-    VISIT(SparseUnion)
-    VISIT(DenseUnion)
-    VISIT(Dictionary)
-    VISIT(Decimal128)
-    VISIT(Decimal256)
-    // TODO
-    // VISIT(Extension)
-
-#undef VISIT
-
-  private:
-    template <typename ArrayType>
-    inline VALUE convert_value(const ArrayType& array,
-                               const int64_t i) {
-      return array_value_converter_->convert(array, i);
-    }
-
-    ArrayValueConverter* array_value_converter_;
-    int64_t value_index_;
-    VALUE result_;
-  };
-
-  class Converter {
-  public:
-    explicit Converter()
-      : array_value_converter_(),
-        list_array_value_converter_(&array_value_converter_),
-        struct_array_value_converter_(&array_value_converter_),
-        union_array_value_converter_(&array_value_converter_),
-        dictionary_array_value_converter_(&array_value_converter_) {
-      array_value_converter_.
-        set_sub_value_converters(&list_array_value_converter_,
-                                 &struct_array_value_converter_,
-                                 &union_array_value_converter_,
-                                 &dictionary_array_value_converter_);
-    }
-
-    template <typename ArrayType>
-    inline VALUE convert_value(const ArrayType& array,
-                               const int64_t i) {
-      return array_value_converter_.convert(array, i);
-    }
-
-    ArrayValueConverter array_value_converter_;
-    ListArrayValueConverter list_array_value_converter_;
-    StructArrayValueConverter struct_array_value_converter_;
-    UnionArrayValueConverter union_array_value_converter_;
-    DictionaryArrayValueConverter dictionary_array_value_converter_;
-  };
-}
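
For reference, the dense-union conversion above is a two-step indirection: the row's type code selects a child field through the type's child_ids mapping, and value_offset redirects the row index into that child. A minimal Ruby sketch of the same lookup (illustrative only; the names are ad hoc, not part of the bindings):

  def dense_union_value(type_codes, child_ids, value_offsets, children, row)
    type_code = type_codes[row]         # per-row type code
    field_id = child_ids[type_code]     # type code -> child field index
    raise "Unknown type ID: #{type_code}" if field_id.nil? || field_id < 0
    children[field_id][value_offsets[row]]  # follow the offset into the child
  end

The sparse variant skips the offset step: every child spans the full length, so the original row index is used directly.
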
diff --git a/ruby/red-arrow/ext/arrow/extconf.rb b/ruby/red-arrow/ext/arrow/extconf.rb
deleted file mode 100644
index 5ba9f4c..0000000
--- a/ruby/red-arrow/ext/arrow/extconf.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "extpp"
-require "mkmf-gnome"
-require_relative "../../lib/arrow/version"
-
-arrow_pkg_config_path = ENV["ARROW_PKG_CONFIG_PATH"]
-if arrow_pkg_config_path
-  pkg_config_paths = [arrow_pkg_config_path, ENV["PKG_CONFIG_PATH"]].compact
-  ENV["PKG_CONFIG_PATH"] = pkg_config_paths.join(File::PATH_SEPARATOR)
-end
-
-unless required_pkg_config_package([
-                                     "arrow",
-                                     Arrow::Version::MAJOR,
-                                     Arrow::Version::MINOR,
-                                     Arrow::Version::MICRO,
-                                   ],
-                                   debian: "libarrow-dev",
-                                   redhat: "arrow-devel",
-                                   homebrew: "apache-arrow",
-                                   msys2: "arrow")
-  exit(false)
-end
-
-unless required_pkg_config_package([
-                                     "arrow-glib",
-                                     Arrow::Version::MAJOR,
-                                     Arrow::Version::MINOR,
-                                     Arrow::Version::MICRO,
-                                   ],
-                                   debian: "libarrow-glib-dev",
-                                   redhat: "arrow-glib-devel",
-                                   homebrew: "apache-arrow-glib",
-                                   msys2: "arrow")
-  exit(false)
-end
-
-[
-  ["glib2", "ext/glib2"],
-].each do |name, relative_source_dir|
-  spec = find_gem_spec(name)
-  source_dir = File.join(spec.full_gem_path, relative_source_dir)
-  build_dir = source_dir
-  add_depend_package_path(name, source_dir, build_dir)
-end
-
-create_makefile("arrow")
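
The ARROW_PKG_CONFIG_PATH handling above lets the build find an Arrow install outside the default pkg-config search path. A hypothetical invocation (the install path is illustrative):

  require "rbconfig"

  # Prepend a non-default Arrow install; extconf.rb merges this into
  # PKG_CONFIG_PATH as shown above.
  ENV["ARROW_PKG_CONFIG_PATH"] = "/opt/arrow/lib/pkgconfig"
  system(RbConfig.ruby, "extconf.rb") or abort("extconf.rb failed")
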
diff --git a/ruby/red-arrow/ext/arrow/raw-records.cpp b/ruby/red-arrow/ext/arrow/raw-records.cpp
deleted file mode 100644
index ef9353e..0000000
--- a/ruby/red-arrow/ext/arrow/raw-records.cpp
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include "converters.hpp"
-
-namespace red_arrow {
-  namespace {
-    class RawRecordsBuilder : private Converter, public arrow::ArrayVisitor {
-    public:
-      explicit RawRecordsBuilder(VALUE records, int n_columns)
-        : Converter(),
-          records_(records),
-          n_columns_(n_columns) {
-      }
-
-      void build(const arrow::RecordBatch& record_batch) {
-        rb::protect([&] {
-          const auto n_rows = record_batch.num_rows();
-          for (int64_t i = 0; i < n_rows; ++i) {
-            auto record = rb_ary_new_capa(n_columns_);
-            rb_ary_push(records_, record);
-          }
-          row_offset_ = 0;
-          for (int i = 0; i < n_columns_; ++i) {
-            const auto array = record_batch.column(i).get();
-            column_index_ = i;
-            check_status(array->Accept(this),
-                         "[record-batch][raw-records]");
-          }
-          return Qnil;
-        });
-      }
-
-      void build(const arrow::Table& table) {
-        rb::protect([&] {
-          const auto n_rows = table.num_rows();
-          for (int64_t i = 0; i < n_rows; ++i) {
-            auto record = rb_ary_new_capa(n_columns_);
-            rb_ary_push(records_, record);
-          }
-          for (int i = 0; i < n_columns_; ++i) {
-            const auto& chunked_array = table.column(i).get();
-            column_index_ = i;
-            row_offset_ = 0;
-            for (const auto array : chunked_array->chunks()) {
-              check_status(array->Accept(this),
-                           "[table][raw-records]");
-              row_offset_ += array->length();
-            }
-          }
-          return Qnil;
-        });
-      }
-
-#define VISIT(TYPE)                                                     \
-      arrow::Status Visit(const arrow::TYPE ## Array& array) override { \
-        convert(array);                                                 \
-        return arrow::Status::OK();                                     \
-      }
-
-      VISIT(Null)
-      VISIT(Boolean)
-      VISIT(Int8)
-      VISIT(Int16)
-      VISIT(Int32)
-      VISIT(Int64)
-      VISIT(UInt8)
-      VISIT(UInt16)
-      VISIT(UInt32)
-      VISIT(UInt64)
-      // TODO
-      // VISIT(HalfFloat)
-      VISIT(Float)
-      VISIT(Double)
-      VISIT(Binary)
-      VISIT(String)
-      VISIT(FixedSizeBinary)
-      VISIT(Date32)
-      VISIT(Date64)
-      VISIT(Time32)
-      VISIT(Time64)
-      VISIT(Timestamp)
-      // TODO
-      // VISIT(Interval)
-      VISIT(List)
-      VISIT(Struct)
-      VISIT(SparseUnion)
-      VISIT(DenseUnion)
-      VISIT(Dictionary)
-      VISIT(Decimal128)
-      VISIT(Decimal256)
-      // TODO
-      // VISIT(Extension)
-
-#undef VISIT
-
-    private:
-      template <typename ArrayType>
-      void convert(const ArrayType& array) {
-        const auto n = array.length();
-        if (array.null_count() > 0) {
-          for (int64_t i = 0, ii = row_offset_; i < n; ++i, ++ii) {
-            auto value = Qnil;
-            if (!array.IsNull(i)) {
-              value = convert_value(array, i);
-            }
-            auto record = rb_ary_entry(records_, ii);
-            rb_ary_store(record, column_index_, value);
-          }
-        } else {
-          for (int64_t i = 0, ii = row_offset_; i < n; ++i, ++ii) {
-            auto record = rb_ary_entry(records_, ii);
-            rb_ary_store(record, column_index_, convert_value(array, i));
-          }
-        }
-      }
-
-      // Destination for converted records.
-      VALUE records_;
-
-      // The current column index.
-      int column_index_;
-
-      // The current row offset.
-      int64_t row_offset_;
-
-      // The number of columns.
-      const int n_columns_;
-    };
-  }
-
-  VALUE
-  record_batch_raw_records(VALUE rb_record_batch) {
-    auto garrow_record_batch = GARROW_RECORD_BATCH(RVAL2GOBJ(rb_record_batch));
-    auto record_batch = garrow_record_batch_get_raw(garrow_record_batch).get();
-    const auto n_rows = record_batch->num_rows();
-    const auto n_columns = record_batch->num_columns();
-    auto records = rb_ary_new_capa(n_rows);
-
-    try {
-      RawRecordsBuilder builder(records, n_columns);
-      builder.build(*record_batch);
-    } catch (rb::State& state) {
-      state.jump();
-    }
-
-    return records;
-  }
-
-  VALUE
-  table_raw_records(VALUE rb_table) {
-    auto garrow_table = GARROW_TABLE(RVAL2GOBJ(rb_table));
-    auto table = garrow_table_get_raw(garrow_table).get();
-    const auto n_rows = table->num_rows();
-    const auto n_columns = table->num_columns();
-    auto records = rb_ary_new_capa(n_rows);
-
-    try {
-      RawRecordsBuilder builder(records, n_columns);
-      builder.build(*table);
-    } catch (rb::State& state) {
-      state.jump();
-    }
-
-    return records;
-  }
-}
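
These two entry points back the Ruby-side raw_records methods: they preallocate one Ruby array per row, then fill column by column (and, for tables, chunk by chunk with row_offset_ as the running start index). A usage sketch, assuming the usual red-arrow bindings:

  require "arrow"

  table = Arrow::Table.new("a" => [1, 2, 3], "b" => ["x", "y", "z"])
  table.raw_records  # => [[1, "x"], [2, "y"], [3, "z"]]
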
diff --git a/ruby/red-arrow/ext/arrow/red-arrow.hpp b/ruby/red-arrow/ext/arrow/red-arrow.hpp
deleted file mode 100644
index c3301dc..0000000
--- a/ruby/red-arrow/ext/arrow/red-arrow.hpp
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#pragma once
-
-#include <arrow/api.h>
-
-#ifdef _WIN32
-#  define gmtime_r gmtime_r_ruby_win32
-#  define localtime_r localtime_r_ruby_win32
-#  include <ruby.h>
-#  undef gmtime_r
-#  undef localtime_r
-#endif
-
-#include <arrow-glib/arrow-glib.hpp>
-#include <rbgobject.h>
-
-namespace red_arrow {
-  extern VALUE cDate;
-
-  extern VALUE cArrowTime;
-
-  extern VALUE ArrowTimeUnitSECOND;
-  extern VALUE ArrowTimeUnitMILLI;
-  extern VALUE ArrowTimeUnitMICRO;
-  extern VALUE ArrowTimeUnitNANO;
-
-  extern ID id_BigDecimal;
-  extern ID id_jd;
-  extern ID id_new;
-  extern ID id_to_datetime;
-
-  VALUE array_values(VALUE obj);
-  VALUE chunked_array_values(VALUE obj);
-
-  VALUE record_batch_raw_records(VALUE obj);
-  VALUE table_raw_records(VALUE obj);
-
-  inline VALUE time_unit_to_scale(const arrow::TimeUnit::type unit) {
-    switch (unit) {
-    case arrow::TimeUnit::SECOND:
-      return INT2FIX(1);
-    case arrow::TimeUnit::MILLI:
-      return INT2FIX(1000);
-    case arrow::TimeUnit::MICRO:
-      return INT2FIX(1000 * 1000);
-    case arrow::TimeUnit::NANO:
-      // NOTE: INT2FIX works for 1e+9 because: FIXNUM_MAX >= (1<<30) - 1 > 1e+9
-      return INT2FIX(1000 * 1000 * 1000);
-    default:
-      rb_raise(rb_eArgError, "invalid arrow::TimeUnit: %d", unit);
-      return Qnil;
-    }
-  }
-
-  inline VALUE time_unit_to_enum(const arrow::TimeUnit::type unit) {
-    switch (unit) {
-    case arrow::TimeUnit::SECOND:
-      return red_arrow::ArrowTimeUnitSECOND;
-    case arrow::TimeUnit::MILLI:
-      return red_arrow::ArrowTimeUnitMILLI;
-    case arrow::TimeUnit::MICRO:
-      return red_arrow::ArrowTimeUnitMICRO;
-    case arrow::TimeUnit::NANO:
-      return red_arrow::ArrowTimeUnitNANO;
-    default:
-      rb_raise(rb_eArgError, "invalid arrow::TimeUnit: %d", unit);
-      return Qnil;
-    }
-  }
-
-  inline void check_status(const arrow::Status&& status, const char* context) {
-    GError* error = nullptr;
-    if (!garrow_error_check(&error, status, context)) {
-      RG_RAISE_ERROR(error);
-    }
-  }
-}
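
time_unit_to_scale maps each arrow::TimeUnit to its ticks-per-second factor; as the NANO comment notes, even 10**9 fits in a Fixnum. The same table in Ruby, for reference:

  TIME_UNIT_SCALES = {
    second: 1,
    milli:  1_000,
    micro:  1_000_000,
    nano:   1_000_000_000,  # still below 2**30, so INT2FIX is safe
  }.freeze
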
diff --git a/ruby/red-arrow/ext/arrow/values.cpp b/ruby/red-arrow/ext/arrow/values.cpp
deleted file mode 100644
index 56846ab..0000000
--- a/ruby/red-arrow/ext/arrow/values.cpp
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-#include "converters.hpp"
-
-namespace red_arrow {
-  namespace {
-    class ValuesBuilder : private Converter, public arrow::ArrayVisitor {
-    public:
-      explicit ValuesBuilder(VALUE values)
-        : Converter(),
-          values_(values),
-          row_offset_(0) {
-      }
-
-      void build(const arrow::Array& array, VALUE rb_array) {
-        rb::protect([&] {
-          check_status(array.Accept(this),
-                       "[array][values]");
-          return Qnil;
-        });
-      }
-
-      void build(const arrow::ChunkedArray& chunked_array,
-                 VALUE rb_chunked_array) {
-        rb::protect([&] {
-          for (const auto& array : chunked_array.chunks()) {
-            check_status(array->Accept(this),
-                         "[chunked-array][values]");
-            row_offset_ += array->length();
-          }
-          return Qnil;
-        });
-      }
-
-#define VISIT(TYPE)                                                     \
-      arrow::Status Visit(const arrow::TYPE ## Array& array) override { \
-        convert(array);                                                 \
-        return arrow::Status::OK();                                     \
-      }
-
-      VISIT(Null)
-      VISIT(Boolean)
-      VISIT(Int8)
-      VISIT(Int16)
-      VISIT(Int32)
-      VISIT(Int64)
-      VISIT(UInt8)
-      VISIT(UInt16)
-      VISIT(UInt32)
-      VISIT(UInt64)
-      // TODO
-      // VISIT(HalfFloat)
-      VISIT(Float)
-      VISIT(Double)
-      VISIT(Binary)
-      VISIT(String)
-      VISIT(FixedSizeBinary)
-      VISIT(Date32)
-      VISIT(Date64)
-      VISIT(Time32)
-      VISIT(Time64)
-      VISIT(Timestamp)
-      // TODO
-      // VISIT(Interval)
-      VISIT(List)
-      VISIT(Struct)
-      VISIT(SparseUnion)
-      VISIT(DenseUnion)
-      VISIT(Dictionary)
-      VISIT(Decimal128)
-      VISIT(Decimal256)
-      // TODO
-      // VISIT(Extension)
-
-#undef VISIT
-
-    private:
-      template <typename ArrayType>
-      void convert(const ArrayType& array) {
-        const auto n = array.length();
-        if (array.null_count() > 0) {
-          for (int64_t i = 0, ii = row_offset_; i < n; ++i, ++ii) {
-            auto value = Qnil;
-            if (!array.IsNull(i)) {
-              value = convert_value(array, i);
-            }
-            rb_ary_store(values_, ii, value);
-          }
-        } else {
-          for (int64_t i = 0, ii = row_offset_; i < n; ++i, ++ii) {
-            rb_ary_store(values_, ii, convert_value(array, i));
-          }
-        }
-      }
-
-      // Destination for converted values.
-      VALUE values_;
-
-      // The current row offset.
-      int64_t row_offset_;
-    };
-  }
-
-  VALUE
-  array_values(VALUE rb_array) {
-    auto garrow_array = GARROW_ARRAY(RVAL2GOBJ(rb_array));
-    auto array = garrow_array_get_raw(garrow_array).get();
-    const auto n_rows = array->length();
-    auto values = rb_ary_new_capa(n_rows);
-
-    try {
-      ValuesBuilder builder(values);
-      builder.build(*array, rb_array);
-    } catch (rb::State& state) {
-      state.jump();
-    }
-
-    return values;
-  }
-
-  VALUE
-  chunked_array_values(VALUE rb_chunked_array) {
-    auto garrow_chunked_array =
-      GARROW_CHUNKED_ARRAY(RVAL2GOBJ(rb_chunked_array));
-    auto chunked_array =
-      garrow_chunked_array_get_raw(garrow_chunked_array).get();
-    const auto n_rows = chunked_array->length();
-    auto values = rb_ary_new_capa(n_rows);
-
-    try {
-      ValuesBuilder builder(values);
-      builder.build(*chunked_array, rb_chunked_array);
-    } catch (rb::State& state) {
-      state.jump();
-    }
-
-    return values;
-  }
-}
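
This file backs Array#values and ChunkedArray#values (which to_a delegates to in the Ruby layer); NULL entries come back as nil, and chunked arrays are filled chunk by chunk. For example:

  array = Arrow::Int32Array.new([1, nil, 3])
  array.values  # => [1, nil, 3]
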
diff --git a/ruby/red-arrow/image/red-arrow.png b/ruby/red-arrow/image/red-arrow.png
deleted file mode 100644
index 6db9b4b..0000000
Binary files a/ruby/red-arrow/image/red-arrow.png and /dev/null differ
diff --git a/ruby/red-arrow/lib/arrow.rb b/ruby/red-arrow/lib/arrow.rb
deleted file mode 100644
index 8fbc537..0000000
--- a/ruby/red-arrow/lib/arrow.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "extpp/setup"
-require "gio2"
-
-require "arrow/version"
-
-require "arrow/loader"
-
-module Arrow
-  class Error < StandardError
-  end
-
-  Loader.load
-end
diff --git a/ruby/red-arrow/lib/arrow/array-builder.rb b/ruby/red-arrow/lib/arrow/array-builder.rb
deleted file mode 100644
index 0ce16ca..0000000
--- a/ruby/red-arrow/lib/arrow/array-builder.rb
+++ /dev/null
@@ -1,209 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "date"
-
-module Arrow
-  class ArrayBuilder
-    class << self
-      def build(values)
-        if self != ArrayBuilder
-          builder = new
-          return builder.build(values)
-        end
-
-        builder_info = nil
-        values.each do |value|
-          builder_info = detect_builder_info(value, builder_info)
-          break if builder_info and builder_info[:detected]
-        end
-        if builder_info
-          builder = builder_info[:builder]
-          builder.build(values)
-        else
-          Arrow::StringArray.new(values)
-        end
-      end
-
-      def buildable?(args)
-        args.size == method(:build).arity
-      end
-
-      private
-      def detect_builder_info(value, builder_info)
-        case value
-        when nil
-          builder_info
-        when true, false
-          {
-            builder: BooleanArrayBuilder.new,
-            detected: true,
-          }
-        when String
-          {
-            builder: StringArrayBuilder.new,
-            detected: true,
-          }
-        when Float
-          {
-            builder: DoubleArrayBuilder.new,
-            detected: true,
-          }
-        when Integer
-          if value < 0
-            {
-              builder: IntArrayBuilder.new,
-              detected: true,
-            }
-          else
-            {
-              builder: UIntArrayBuilder.new,
-            }
-          end
-        when Time
-          data_type = value.data_type
-          case data_type.unit
-          when TimeUnit::SECOND
-            builder_info || {
-              builder: Time32ArrayBuilder.new(data_type)
-            }
-          when TimeUnit::MILLI
-            if builder_info and builder_info[:builder].is_a?(Time64ArrayBuilder)
-              builder_info
-            else
-              {
-                builder: Time32ArrayBuilder.new(data_type),
-              }
-            end
-          when TimeUnit::MICRO
-            {
-              builder: Time64ArrayBuilder.new(data_type),
-            }
-          when TimeUnit::NANO
-            {
-              builder: Time64ArrayBuilder.new(data_type),
-              detected: true
-            }
-          end
-        when ::Time
-          data_type = TimestampDataType.new(:nano)
-          {
-            builder: TimestampArrayBuilder.new(data_type),
-            detected: true,
-          }
-        when DateTime
-          {
-            builder: Date64ArrayBuilder.new,
-            detected: true,
-          }
-        when Date
-          {
-            builder: Date32ArrayBuilder.new,
-            detected: true,
-          }
-        when BigDecimal
-          if value.to_arrow.is_a?(Decimal128)
-            {
-              builder: Decimal128ArrayBuilder.new,
-            }
-          else
-            {
-              builder: Decimal256ArrayBuilder.new,
-              detected: true,
-            }
-          end
-        when ::Array
-          sub_builder_info = nil
-          value.each do |sub_value|
-            sub_builder_info = detect_builder_info(sub_value, sub_builder_info)
-            break if sub_builder_info and sub_builder_info[:detected]
-          end
-          if sub_builder_info and sub_builder_info[:detected]
-            sub_value_data_type = sub_builder_info[:builder].value_data_type
-            field = Field.new("item", sub_value_data_type)
-            {
-              builder: ListArrayBuilder.new(ListDataType.new(field)),
-              detected: true,
-            }
-          else
-            builder_info
-          end
-        else
-          {
-            builder: StringArrayBuilder.new,
-            detected: true,
-          }
-        end
-      end
-    end
-
-    def build(values)
-      append(*values)
-      finish
-    end
-
-    # @since 0.12.0
-    def append(*values)
-      value_convertable = respond_to?(:convert_to_arrow_value, true)
-      start_index = 0
-      current_index = 0
-      status = :value
-
-      values.each do |value|
-        if value.nil?
-          if status == :value
-            if start_index != current_index
-              target_values = values[start_index...current_index]
-              if value_convertable
-                target_values = target_values.collect do |v|
-                  convert_to_arrow_value(v)
-                end
-              end
-              append_values(target_values, nil)
-              start_index = current_index
-            end
-            status = :null
-          end
-        else
-          if status == :null
-            append_nulls(current_index - start_index)
-            start_index = current_index
-            status = :value
-          end
-        end
-        current_index += 1
-      end
-      if start_index != current_index
-        if status == :value
-          if start_index == 0 and current_index == values.size
-            target_values = values
-          else
-            target_values = values[start_index...current_index]
-          end
-          if value_convertable
-            target_values = target_values.collect do |v|
-              convert_to_arrow_value(v)
-            end
-          end
-          append_values(target_values, nil)
-        else
-          append_nulls(current_index - start_index)
-        end
-      end
-    end
-  end
-end
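
ArrayBuilder.build scans the values until a detection is marked :detected, so an ambiguous early value (say, a non-negative integer) can be refined by a later one (a negative integer or a Float). Illustrative outcomes of the case analysis above (the concrete integer width is chosen by IntArrayBuilder/UIntArrayBuilder, which are defined elsewhere):

  Arrow::ArrayBuilder.build([1.0, 2.5])    # Float            -> DoubleArrayBuilder
  Arrow::ArrayBuilder.build([1, 2, -3])    # negative Integer -> IntArrayBuilder
  Arrow::ArrayBuilder.build([true, nil])   # Boolean          -> BooleanArrayBuilder
  Arrow::ArrayBuilder.build([Object.new])  # fallback         -> StringArrayBuilder
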
diff --git a/ruby/red-arrow/lib/arrow/array.rb b/ruby/red-arrow/lib/arrow/array.rb
deleted file mode 100644
index ae6125d..0000000
--- a/ruby/red-arrow/lib/arrow/array.rb
+++ /dev/null
@@ -1,222 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Array
-    include Enumerable
-    include GenericFilterable
-    include GenericTakeable
-
-    class << self
-      def new(*args)
-        _builder_class = builder_class
-        return super if _builder_class.nil?
-        return super unless _builder_class.buildable?(args)
-        _builder_class.build(*args)
-      end
-
-      def builder_class
-        builder_class_name = "#{name}Builder"
-        return nil unless const_defined?(builder_class_name)
-        const_get(builder_class_name)
-      end
-    end
-
-    # @param i [Integer]
-    #   The index of the value to be retrieved.
-    #
-    #   You can specify a negative index, as with `::Array#[]`.
-    #
-    # @return [Object, nil]
-    #   The `i`-th value.
-    #
-    #   `nil` for a NULL value or an out-of-range `i`.
-    def [](i)
-      i += length if i < 0
-      return nil if i < 0 or i >= length
-      if null?(i)
-        nil
-      else
-        get_value(i)
-      end
-    end
-
-    def each
-      return to_enum(__method__) unless block_given?
-
-      length.times do |i|
-        yield(self[i])
-      end
-    end
-
-    def reverse_each
-      return to_enum(__method__) unless block_given?
-
-      (length - 1).downto(0) do |i|
-        yield(self[i])
-      end
-    end
-
-    def to_arrow
-      self
-    end
-
-    alias_method :value_data_type_raw, :value_data_type
-    def value_data_type
-      @value_data_type ||= value_data_type_raw
-    end
-
-    def to_a
-      values
-    end
-
-    alias_method :is_in_raw, :is_in
-    def is_in(values)
-      case values
-      when ::Array
-        if self.class.builder_class.buildable?([values])
-          values = self.class.new(values)
-        else
-          values = self.class.new(value_data_type, values)
-        end
-        is_in_raw(values)
-      when ChunkedArray
-        is_in_chunked_array(values)
-      else
-        is_in_raw(values)
-      end
-    end
-
-    # @api private
-    alias_method :concatenate_raw, :concatenate
-    # Concatenates the given other arrays to the array.
-    #
-    # @param other_arrays [::Array, Arrow::Array] The arrays to be
-    #   concatenated.
-    #
-    #   Each other array is processed by {#resolve} before it's
-    #   concatenated.
-    #
-    # @example Raw Ruby Array
-    #   array = Arrow::Int32Array.new([1])
-    #   array.concatenate([2, 3], [4]) # => Arrow::Int32Array.new([1, 2, 3, 4])
-    #
-    # @example Arrow::Array
-    #   array = Arrow::Int32Array.new([1])
-    #   array.concatenate(Arrow::Int32Array.new([2, 3]),
-    #                     Arrow::Int8Array.new([4])) # => Arrow::Int32Array.new([1, 2, 3, 4])
-    #
-    # @since 4.0.0
-    def concatenate(*other_arrays)
-      other_arrays = other_arrays.collect do |other_array|
-        resolve(other_array)
-      end
-      concatenate_raw(other_arrays)
-    end
-
-    # Concatenates the given other array to the array.
-    #
-    # If you have multiple arrays to be concatenated, you should use
-    # {#concatenate} to concatenate multiple arrays at once.
-    #
-    # @param other_array [::Array, Arrow::Array] The array to be concatenated.
-    #
-    #   `other_array` is processed by {#resolve} before it's
-    #   concatenated.
-    #
-    # @example Raw Ruby Array
-    #   Arrow::Int32Array.new([1]) + [2, 3] # => Arrow::Int32Array.new([1, 2, 3])
-    #
-    # @example Arrow::Array
-    #   Arrow::Int32Array.new([1]) +
-    #     Arrow::Int32Array.new([2, 3]) # => Arrow::Int32Array.new([1, 2, 3])
-    #
-    # @since 4.0.0
-    def +(other_array)
-      concatenate(other_array)
-    end
-
-    # Ensures that an array with the same data type is returned for the given array.
-    #
-    # @return [Arrow::Array]
-    #
-    # @overload resolve(other_raw_array)
-    #
-    #   @param other_raw_array [::Array] A raw Ruby Array. A new Arrow::Array
-    #     is built by `self.class.new`.
-    #
-    #   @example Raw Ruby Array
-    #     int32_array = Arrow::Int32Array.new([1])
-    #     other_array = int32_array.resolve([2, 3, 4])
-    #     other_array # => Arrow::Int32Array.new([2, 3, 4])
-    #
-    # @overload resolve(other_array)
-    #
-    #   @param other_array [Arrow::Array] Another Arrow::Array.
-    #
-    #     If the given other array has the same data type as
-    #     `self`, the given other array is returned as-is.
-    #
-    #     If the given other array doesn't have the same data type as
-    #     `self`, the given other array is cast.
-    #
-    #   @example Same data type
-    #     int32_array = Arrow::Int32Array.new([1])
-    #     other_int32_array = Arrow::Int32Array.new([2, 3, 4])
-    #     other_array = int32_array.resolve(other_int32_array)
-    #     other_array.object_id == other_int32_array.object_id
-    #
-    #   @example Other data type
-    #     int32_array = Arrow::Int32Array.new([1])
-    #     other_int8_array = Arrow::Int8Array.new([2, 3, 4])
-    #     other_array = int32_array.resolve(other_int8_array)
-    #     other_array #=> Arrow::Int32Array.new([2, 3, 4])
-    #
-    # @since 4.0.0
-    def resolve(other_array)
-      if other_array.is_a?(::Array)
-        builder_class = self.class.builder_class
-        if builder_class.nil?
-          message =
-            "[array][resolve] can't build #{value_data_type} array " +
-            "from raw Ruby Array"
-          raise ArgumentError, message
-        end
-        if builder_class.buildable?([other_array])
-          other_array = builder_class.build(other_array)
-        elsif builder_class.buildable?([value_data_type, other_array])
-          other_array = builder_class.build(value_data_type, other_array)
-        else
-          message =
-            "[array][resolve] need to implement " +
-            "a feature that building #{value_data_type} array " +
-            "from raw Ruby Array"
-          raise NotImpelemented, message
-        end
-        other_array
-      elsif other_array.respond_to?(:value_data_type)
-        return other_array if value_data_type == other_array.value_data_type
-        other_array.cast(value_data_type)
-      else
-        message =
-          "[array][resolve] can't build #{value_data_type} array: " +
-          "#{other_array.inspect}"
-        raise ArgumentError, message
-      end
-    end
-  end
-end
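
A few of the behaviors defined above, shown concretely (values mirror the @example blocks):

  array = Arrow::Int32Array.new([1, 2, 3])
  array[-1]       # => 3   (negative indexes count from the end)
  array[3]        # => nil (out of range, no exception)
  array + [4, 5]  # => Arrow::Int32Array.new([1, 2, 3, 4, 5])
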
diff --git a/ruby/red-arrow/lib/arrow/bigdecimal-extension.rb b/ruby/red-arrow/lib/arrow/bigdecimal-extension.rb
deleted file mode 100644
index 338efe6..0000000
--- a/ruby/red-arrow/lib/arrow/bigdecimal-extension.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "bigdecimal"
-
-class BigDecimal
-  def to_arrow
-    if precision <= Arrow::Decimal128DataType::MAX_PRECISION
-      Arrow::Decimal128.new(to_s)
-    else
-      Arrow::Decimal256.new(to_s)
-    end
-  end
-end
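
BigDecimal#to_arrow picks the narrower decimal whenever the precision fits; Decimal128's maximum precision is 38 digits. For example:

  require "bigdecimal"

  BigDecimal("1.23").to_arrow    # fits in 38 digits -> Arrow::Decimal128
  BigDecimal("9" * 40).to_arrow  # 40 digits         -> Arrow::Decimal256
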
diff --git a/ruby/red-arrow/lib/arrow/block-closable.rb b/ruby/red-arrow/lib/arrow/block-closable.rb
deleted file mode 100644
index ec236bd..0000000
--- a/ruby/red-arrow/lib/arrow/block-closable.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  module BlockClosable
-    def open(*args, &block)
-      io = new(*args)
-      return io unless block
-
-      begin
-        yield(io)
-      ensure
-        if io.respond_to?(:closed?)
-          io.close unless io.closed?
-        else
-          io.close
-        end
-      end
-    end
-  end
-end
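
BlockClosable gives open the usual Ruby block semantics: with no block the new object is returned; with one, the object is yielded and closed afterwards (the closed? check avoids double-closing). A sketch with a stream class that mixes this in, as red-arrow's I/O classes do (the file path is illustrative):

  Arrow::MemoryMappedInputStream.open("data.arrow") do |input|
    # ... read from input ...
  end  # input is closed here unless already closed
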
diff --git a/ruby/red-arrow/lib/arrow/buffer.rb b/ruby/red-arrow/lib/arrow/buffer.rb
deleted file mode 100644
index 1efd797..0000000
--- a/ruby/red-arrow/lib/arrow/buffer.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Buffer
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    def initialize(data)
-      @data = data
-      initialize_raw(data)
-    end
-  end
-end
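
The initialize override stores the wrapped data in @data, presumably so the Ruby object stays reachable for the buffer's lifetime (an inference; the rationale isn't stated here). For example:

  data = "hello"
  buffer = Arrow::Buffer.new(data)  # @data keeps `data` alive alongside the buffer
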
diff --git a/ruby/red-arrow/lib/arrow/chunked-array.rb b/ruby/red-arrow/lib/arrow/chunked-array.rb
deleted file mode 100644
index 30dffa8..0000000
--- a/ruby/red-arrow/lib/arrow/chunked-array.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class ChunkedArray
-    include Enumerable
-    include GenericFilterable
-    include GenericTakeable
-
-    alias_method :size, :n_rows
-    unless method_defined?(:length)
-      alias_method :length, :n_rows
-    end
-
-    alias_method :chunks_raw, :chunks
-    def chunks
-      @chunks ||= chunks_raw
-    end
-
-    def null?(i)
-      chunks.each do |array|
-        return array.null?(i) if i < array.length
-        i -= array.length
-      end
-      nil
-    end
-
-    def valid?(i)
-      chunks.each do |array|
-        return array.valid?(i) if i < array.length
-        i -= array.length
-      end
-      nil
-    end
-
-    def [](i)
-      i += length if i < 0
-      chunks.each do |array|
-        return array[i] if i < array.length
-        i -= array.length
-      end
-      nil
-    end
-
-    def each(&block)
-      return to_enum(__method__) unless block_given?
-
-      chunks.each do |array|
-        array.each(&block)
-      end
-    end
-
-    def reverse_each(&block)
-      return to_enum(__method__) unless block_given?
-
-      chunks.reverse_each do |array|
-        array.reverse_each(&block)
-      end
-    end
-
-    def each_chunk(&block)
-      chunks.each(&block)
-    end
-
-    def pack
-      first_chunk = chunks.first
-      data_type = first_chunk.value_data_type
-      case data_type
-      when TimestampDataType
-        builder = TimestampArrayBuilder.new(data_type)
-        builder.build(to_a)
-      else
-        first_chunk.class.new(to_a)
-      end
-    end
-  end
-end
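
Indexing walks the chunks, subtracting each chunk's length until the index lands inside one; pack flattens the chunks back into a single array (with a special case to preserve timestamp data types). A sketch, assuming ChunkedArray.new accepts an array of chunks as in red-arrow:

  chunks = [Arrow::Int32Array.new([1, 2]), Arrow::Int32Array.new([3])]
  chunked = Arrow::ChunkedArray.new(chunks)
  chunked[2]    # => 3 (found in the second chunk)
  chunked.pack  # => a single Arrow::Int32Array of [1, 2, 3]
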
diff --git a/ruby/red-arrow/lib/arrow/column-containable.rb b/ruby/red-arrow/lib/arrow/column-containable.rb
deleted file mode 100644
index 51ad88e..0000000
--- a/ruby/red-arrow/lib/arrow/column-containable.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  module ColumnContainable
-    def columns
-      @columns ||= schema.n_fields.times.collect do |i|
-        Column.new(self, i)
-      end
-    end
-
-    def each_column(&block)
-      columns.each(&block)
-    end
-
-    def find_column(name_or_index)
-      case name_or_index
-      when String, Symbol
-        name = name_or_index.to_s
-        index = schema.get_field_index(name)
-        return nil if index == -1
-        Column.new(self, index)
-      when Integer
-        index = name_or_index
-        index += n_columns if index < 0
-        return nil if index < 0 or index >= n_columns
-        Column.new(self, index)
-      else
-        message = "column name or index must be String, Symbol or Integer"
-        raise ArgumentError, message
-      end
-    end
-  end
-end
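
find_column accepts a name (String or Symbol) or an index, with negative indexes counted from the end as for ::Array; unknown names and out-of-range indexes return nil rather than raising. For example:

  table = Arrow::Table.new("a" => [1, 2], "b" => [3, 4])
  table.find_column("a")  # => the "a" column
  table.find_column(-1)   # => the last column ("b")
  table.find_column("c")  # => nil
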
diff --git a/ruby/red-arrow/lib/arrow/column.rb b/ruby/red-arrow/lib/arrow/column.rb
deleted file mode 100644
index 06f3dbd..0000000
--- a/ruby/red-arrow/lib/arrow/column.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Column
-    include Enumerable
-
-    attr_reader :container
-    attr_reader :field
-    attr_reader :data
-    def initialize(container, index)
-      @container = container
-      @index = index
-      @field = @container.schema[@index]
-      @data = @container.get_column_data(@index)
-    end
-
-    def name
-      @field.name
-    end
-
-    def data_type
-      @field.data_type
-    end
-
-    def null?(i)
-      @data.null?(i)
-    end
-
-    def valid?(i)
-      @data.valid?(i)
-    end
-
-    def [](i)
-      @data[i]
-    end
-
-    def each(&block)
-      @data.each(&block)
-    end
-
-    def reverse_each(&block)
-      @data.reverse_each(&block)
-    end
-
-    def n_rows
-      @data.n_rows
-    end
-    alias_method :size, :n_rows
-    alias_method :length, :n_rows
-
-    def n_nulls
-      @data.n_nulls
-    end
-
-    def ==(other)
-      other.is_a?(self.class) and
-        @field == other.field and
-        @data == other.data
-    end
-  end
-end
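
Column is a thin view pairing a schema field with that column's data; Enumerable comes from delegating each to the data. For example:

  table = Arrow::Table.new("a" => [1, nil, 3])
  column = table.find_column("a")
  column.name     # => "a"
  column.n_nulls  # => 1, delegated to the underlying data
  column.to_a     # => [1, nil, 3] via Enumerable
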
diff --git a/ruby/red-arrow/lib/arrow/compression-type.rb b/ruby/red-arrow/lib/arrow/compression-type.rb
deleted file mode 100644
index b913e48..0000000
--- a/ruby/red-arrow/lib/arrow/compression-type.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class CompressionType
-    EXTENSIONS = {}
-    values.each do |value|
-      case value
-      when UNCOMPRESSED
-      when GZIP
-        EXTENSIONS["gz"] = value
-      else
-        EXTENSIONS[value.nick] = value
-      end
-    end
-
-    class << self
-      def resolve_extension(extension)
-        EXTENSIONS[extension.to_s]
-      end
-    end
-  end
-end
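
EXTENSIONS maps file extensions to compression types, special-casing "gz" for GZIP and skipping UNCOMPRESSED; other codecs register under their enum nick. A sketch (which codecs exist depends on how Arrow was built; :zstd is assumed here):

  Arrow::CompressionType.resolve_extension("gz")   # => Arrow::CompressionType::GZIP
  Arrow::CompressionType.resolve_extension(:zstd)  # => Arrow::CompressionType::ZSTD
  Arrow::CompressionType.resolve_extension("txt")  # => nil
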
diff --git a/ruby/red-arrow/lib/arrow/csv-loader.rb b/ruby/red-arrow/lib/arrow/csv-loader.rb
deleted file mode 100644
index f82263e..0000000
--- a/ruby/red-arrow/lib/arrow/csv-loader.rb
+++ /dev/null
@@ -1,384 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "csv"
-require "pathname"
-require "time"
-
-module Arrow
-  class CSVLoader
-    class << self
-      def load(path_or_data, **options)
-        new(path_or_data, **options).load
-      end
-    end
-
-    def initialize(path_or_data, **options)
-      @path_or_data = path_or_data
-      @options = options
-      if @options.key?(:delimiter)
-        @options[:col_sep] = @options.delete(:delimiter)
-      end
-      @compression = @options.delete(:compression)
-    end
-
-    def load
-      case @path_or_data
-      when Pathname
-        load_from_path(@path_or_data.to_path)
-      when /\A.+\.csv\z/i
-        load_from_path(@path_or_data)
-      else
-        load_data(@path_or_data)
-      end
-    end
-
-    private
-    def open_csv(path, **options)
-      CSV.open(path, **options) do |csv|
-        yield(csv)
-      end
-    end
-
-    def parse_csv_data(data, **options)
-      csv = CSV.new(data, **options)
-      begin
-        yield(csv)
-      ensure
-        csv.close
-      end
-    end
-
-    def read_csv(csv)
-      values_set = []
-      csv.each do |row|
-        if row.is_a?(CSV::Row)
-          row = row.collect(&:last)
-        end
-        row.each_with_index do |value, i|
-          values = (values_set[i] ||= [])
-          values << value
-        end
-      end
-      return nil if values_set.empty?
-
-      arrays = values_set.collect.with_index do |values, i|
-        ArrayBuilder.build(values)
-      end
-      if csv.headers
-        names = csv.headers
-      else
-        names = arrays.size.times.collect(&:to_s)
-      end
-      raw_table = {}
-      names.each_with_index do |name, i|
-        raw_table[name] = arrays[i]
-      end
-      Table.new(raw_table)
-    end
-
-    def reader_options
-      options = CSVReadOptions.new
-      @options.each do |key, value|
-        case key
-        when :headers
-          case value
-          when ::Array
-            options.column_names = value
-          when String
-            return nil
-          else
-            if value
-              options.generate_column_names = false
-            else
-              options.generate_column_names = true
-            end
-          end
-        when :column_types
-          value.each do |name, type|
-            options.add_column_type(name, type)
-          end
-        when :schema
-          options.add_schema(value)
-        when :encoding
-          # process encoding on opening input
-        when :col_sep
-          options.delimiter = value
-        else
-          setter = "#{key}="
-          if options.respond_to?(setter)
-            options.__send__(setter, value)
-          else
-            return nil
-          end
-        end
-      end
-      options
-    end
-
-    def open_decompress_input(raw_input)
-      if @compression
-        codec = Codec.new(@compression)
-        CompressedInputStream.open(codec, raw_input) do |input|
-          yield(input)
-        end
-      else
-        yield(raw_input)
-      end
-    end
-
-    def open_encoding_convert_stream(raw_input, &block)
-      encoding = @options[:encoding]
-      if encoding
-        converter = Gio::CharsetConverter.new("UTF-8", encoding)
-        convert_input_stream =
-          Gio::ConverterInputStream.new(raw_input, converter)
-        GIOInputStream.open(convert_input_stream, &block)
-      else
-        yield(raw_input)
-      end
-    end
-
-    def wrap_input(raw_input)
-      open_decompress_input(raw_input) do |input_|
-        open_encoding_convert_stream(input_) do |input__|
-          yield(input__)
-        end
-      end
-    end
-
-    def load_from_path(path)
-      options = reader_options
-      if options
-        begin
-          MemoryMappedInputStream.open(path) do |raw_input|
-            wrap_input(raw_input) do |input|
-              return CSVReader.new(input, options).read
-            end
-          end
-        rescue Arrow::Error::Invalid, Gio::Error
-        end
-      end
-
-      options = update_csv_parse_options(@options, :open_csv, path)
-      open_csv(path, **options) do |csv|
-        read_csv(csv)
-      end
-    end
-
-    def load_data(data)
-      options = reader_options
-      if options
-        begin
-          BufferInputStream.open(Buffer.new(data)) do |raw_input|
-            wrap_input(raw_input) do |input|
-              return CSVReader.new(input, options).read
-            end
-          end
-        rescue Arrow::Error::Invalid, Gio::Error
-        end
-      end
-
-      options = update_csv_parse_options(@options, :parse_csv_data, data)
-      parse_csv_data(data, **options) do |csv|
-        read_csv(csv)
-      end
-    end
-
-    def selective_converter(target_index)
-      lambda do |field, field_info|
-        if target_index.nil? or field_info.index == target_index
-          yield(field)
-        else
-          field
-        end
-      end
-    end
-
-    BOOLEAN_CONVERTER = lambda do |field|
-      begin
-        encoded_field = field.encode(CSV::ConverterEncoding)
-      rescue EncodingError
-        field
-      else
-        case encoded_field
-        when "true"
-          true
-        when "false"
-          false
-        else
-          field
-        end
-      end
-    end
-
-    ISO8601_CONVERTER = lambda do |field|
-      begin
-        encoded_field = field.encode(CSV::ConverterEncoding)
-      rescue EncodingError
-        field
-      else
-        begin
-          ::Time.iso8601(encoded_field)
-        rescue ArgumentError
-          field
-        end
-      end
-    end
-
-    AVAILABLE_CSV_PARSE_OPTIONS = {}
-    CSV.instance_method(:initialize).parameters.each do |type, name|
-      AVAILABLE_CSV_PARSE_OPTIONS[name] = true if type == :key
-    end
-
-    def update_csv_parse_options(options, create_csv, *args)
-      if options.key?(:converters)
-        new_options = options.dup
-      else
-        converters = [:all, BOOLEAN_CONVERTER, ISO8601_CONVERTER]
-        new_options = options.merge(converters: converters)
-      end
-
-      # TODO: Support :schema and :column_types
-
-      unless AVAILABLE_CSV_PARSE_OPTIONS.empty?
-        new_options.select! do |key, value|
-          AVAILABLE_CSV_PARSE_OPTIONS.key?(key)
-        end
-      end
-
-      unless options.key?(:headers)
-        __send__(create_csv, *args, **new_options) do |csv|
-          new_options[:headers] = have_header?(csv)
-        end
-      end
-      unless options.key?(:converters)
-        __send__(create_csv, *args, **new_options) do |csv|
-          new_options[:converters] = detect_robust_converters(csv)
-        end
-      end
-
-      new_options
-    end
-
-    def have_header?(csv)
-      if @options.key?(:headers)
-        return @options[:headers]
-      end
-
-      row1 = csv.shift
-      return false if row1.nil?
-      return false if row1.any?(&:nil?)
-
-      row2 = csv.shift
-      return nil if row2.nil?
-      return true if row2.any?(&:nil?)
-
-      return false if row1.any? {|value| not value.is_a?(String)}
-
-      if row1.collect(&:class) != row2.collect(&:class)
-        return true
-      end
-
-      nil
-    end
-
-    def detect_robust_converters(csv)
-      column_types = []
-      csv.each do |row|
-        if row.is_a?(CSV::Row)
-          each_value = Enumerator.new do |yielder|
-            row.each do |_name, value|
-              yielder << value
-            end
-          end
-        else
-          each_value = row.each
-        end
-        each_value.with_index do |value, i|
-          current_column_type = column_types[i]
-          next if current_column_type == :string
-
-          candidate_type = nil
-          case value
-          when nil
-            next
-          when "true", "false", true, false
-            candidate_type = :boolean
-          when Integer
-            candidate_type = :integer
-            if current_column_type == :float
-              candidate_type = :float
-            end
-          when Float
-            candidate_type = :float
-            if current_column_type == :integer
-              column_types[i] = candidate_type
-            end
-          when ::Time
-            candidate_type = :time
-          when DateTime
-            candidate_type = :date_time
-          when Date
-            candidate_type = :date
-          when String
-            next if value.empty?
-            candidate_type = :string
-          else
-            candidate_type = :string
-          end
-
-          column_types[i] ||= candidate_type
-          if column_types[i] != candidate_type
-            column_types[i] = :string
-          end
-        end
-      end
-
-      converters = []
-      column_types.each_with_index do |type, i|
-        case type
-        when :boolean
-          converters << selective_converter(i, &BOOLEAN_CONVERTER)
-        when :integer
-          converters << selective_converter(i) do |field|
-            if field.nil? or field.empty?
-              nil
-            else
-              CSV::Converters[:integer].call(field)
-            end
-          end
-        when :float
-          converters << selective_converter(i) do |field|
-            if field.nil? or field.empty?
-              nil
-            else
-              CSV::Converters[:float].call(field)
-            end
-          end
-        when :time
-          converters << selective_converter(i, &ISO8601_CONVERTER)
-        when :date_time
-          converters << selective_converter(i, &CSV::Converters[:date_time])
-        when :date
-          converters << selective_converter(i, &CSV::Converters[:date])
-        end
-      end
-      converters
-    end
-  end
-end
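
CSVLoader first tries Arrow's native CSV reader (via reader_options) and falls back to Ruby's CSV, with auto-detected headers and robust converters, when an option can't be mapped or native parsing fails. Typical usage:

  table = Arrow::CSVLoader.load("data.csv", headers: true)
  table = Arrow::CSVLoader.load("a\tb\n1\t2\n",
                                headers: true,
                                delimiter: "\t")
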
diff --git a/ruby/red-arrow/lib/arrow/csv-read-options.rb b/ruby/red-arrow/lib/arrow/csv-read-options.rb
deleted file mode 100644
index dec3dec..0000000
--- a/ruby/red-arrow/lib/arrow/csv-read-options.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class CSVReadOptions
-    alias_method :add_column_type_raw, :add_column_type
-    def add_column_type(name, type)
-      add_column_type_raw(name, DataType.resolve(type))
-    end
-
-    alias_method :delimiter_raw, :delimiter
-    def delimiter
-      delimiter_raw.chr
-    end
-
-    alias_method :delimiter_raw=, :delimiter=
-    def delimiter=(delimiter)
-      case delimiter
-      when String
-        if delimiter.bytesize != 1
-          message = "delimiter must be 1 byte character: #{delimiter.inspect}"
-          raise ArgumentError, message
-        end
-        delimiter = delimiter.ord
-      end
-      self.delimiter_raw = delimiter
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/data-type.rb b/ruby/red-arrow/lib/arrow/data-type.rb
deleted file mode 100644
index 07b4525..0000000
--- a/ruby/red-arrow/lib/arrow/data-type.rb
+++ /dev/null
@@ -1,198 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class DataType
-    class << self
-      # Ensure returning suitable {Arrow::DataType}.
-      #
-      # @overload resolve(data_type)
-      #
-      #   Returns the given data type itself. This is convenient when
-      #   you use this method as an {Arrow::DataType} converter.
-      #
-      #   @param data_type [Arrow::DataType] The data type.
-      #
-      #   @return [Arrow::DataType] The given data type itself.
-      #
-      # @overload resolve(name)
-      #
-      #   Creates a suitable data type from the given type name. For
-      #   example, you can create {Arrow::BooleanDataType} from
-      #   `:boolean`.
-      #
-      #   @param name [String, Symbol] The type name of the data type.
-      #
-      #   @return [Arrow::DataType] A new suitable data type.
-      #
-      #   @example Create a boolean data type
-      #     Arrow::DataType.resolve(:boolean)
-      #
-      # @overload resolve(name_with_arguments)
-      #
-      #   Creates a new suitable data type from the given type name
-      #   with arguments.
-      #
-      #   @param name_with_arguments [::Array<String, ...>]
-      #     The type name of the data type as the first element.
-      #
-      #     The remaining elements are additional information for the data type.
-      #
-      #     For example, {Arrow::TimestampDataType} needs a unit as
-      #     additional information.
-      #
-      #   @return [Arrow::DataType] A new suitable data type.
-      #
-      #   @example Create a boolean data type
-      #     Arrow::DataType.resolve([:boolean])
-      #
-      #   @example Create a milliseconds unit timestamp data type
-      #     Arrow::DataType.resolve([:timestamp, :milli])
-      #
-      # @overload resolve(description)
-      #
-      #   Creates a new suitable data type from the given data type
-      #   description.
-      #
-      #   A data type description is a raw `Hash`. It must have a
-      #   `:type` value, which is the type of the data type.
-      #
-      #   If the type needs additional information, you need to
-      #   specify it. See the constructor documentation for what
-      #   information is needed. For example,
-      #   {Arrow::ListDataType#initialize} needs a `:field` value.
-      #
-      #   @param description [Hash] The description of the data type.
-      #
-      #   @option description [String, Symbol] :type The type name of
-      #     the data type.
-      #
-      #   @return [Arrow::DataType] A new suitable data type.
-      #
-      #   @example Create a boolean data type
-      #     Arrow::DataType.resolve(type: :boolean)
-      #
-      #   @example Create a list data type
-      #     Arrow::DataType.resolve(type: :list,
-      #                             field: {name: "visible", type: :boolean})
-      def resolve(data_type)
-        case data_type
-        when DataType
-          data_type
-        when String, Symbol
-          resolve_class(data_type).new
-        when ::Array
-          type, *arguments = data_type
-          resolve_class(type).new(*arguments)
-        when Hash
-          type = nil
-          description = {}
-          data_type.each do |key, value|
-            key = key.to_sym
-            case key
-            when :type
-              type = value
-            else
-              description[key] = value
-            end
-          end
-          if type.nil?
-            message =
-              "data type description must have :type value: #{data_type.inspect}"
-            raise ArgumentError, message
-          end
-          data_type_class = resolve_class(type)
-          if description.empty?
-            data_type_class.new
-          else
-            data_type_class.new(description)
-          end
-        else
-          message =
-            "data type must be " +
-            "Arrow::DataType, String, Symbol, [String, ...], [Symbol, ...] " +
-            "{type: String, ...} or {type: Symbol, ...}: #{data_type.inspect}"
-          raise ArgumentError, message
-        end
-      end
-
-      def sub_types
-        types = {}
-        gtype.children.each do |child|
-          sub_type = child.to_class
-          types[sub_type] = true
-          sub_type.sub_types.each do |sub_sub_type|
-            types[sub_sub_type] = true
-          end
-        end
-        types.keys
-      end
-
-      def try_convert(value)
-        begin
-          resolve(value)
-        rescue ArgumentError
-          nil
-        end
-      end
-
-      private
-      def resolve_class(data_type)
-        components = data_type.to_s.split("_").collect(&:capitalize)
-        data_type_name = components.join.gsub(/\AUint/, "UInt")
-        data_type_class_name = "#{data_type_name}DataType"
-        unless Arrow.const_defined?(data_type_class_name)
-          available_types = []
-          Arrow.constants.each do |name|
-            name = name.to_s
-            next if name == "DataType"
-            next unless name.end_with?("DataType")
-            name = name.gsub(/DataType\z/, "")
-            components = name.scan(/(UInt[0-9]+|[A-Z][a-z\d]+)/).flatten
-            available_types << components.collect(&:downcase).join("_").to_sym
-          end
-          message =
-            "unknown type: <#{data_type.inspect}>: " +
-            "available types: #{available_types.inspect}"
-          raise ArgumentError, message
-        end
-        data_type_class = Arrow.const_get(data_type_class_name)
-        if data_type_class.gtype.abstract?
-          not_abstract_types = data_type_class.sub_types.find_all do |sub_type|
-            not sub_type.gtype.abstract?
-          end
-          not_abstract_types = not_abstract_types.sort_by do |type|
-            type.name
-          end
-          message =
-            "abstract type: <#{data_type.inspect}>: " +
-            "use one of not abstract type: #{not_abstract_types.inspect}"
-          raise ArgumentError, message
-        end
-        data_type_class
-      end
-    end
-
-    def build_array(values)
-      base_name = self.class.name.gsub(/DataType\z/, "")
-      builder_class = self.class.const_get("#{base_name}ArrayBuilder")
-      args = [values]
-      args.unshift(self) unless builder_class.buildable?(args)
-      builder_class.build(*args)
-    end
-  end
-end
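
Taken together, resolve and build_array let callers go from a plain Ruby
description to a typed array. A short usage sketch based only on the overloads
documented above:

    Arrow::DataType.resolve(:boolean)             # => Arrow::BooleanDataType
    Arrow::DataType.resolve([:timestamp, :milli]) # type name plus arguments
    Arrow::DataType.resolve(type: :list,
                            field: {name: "visible", type: :boolean})

    # build_array picks the matching builder class by naming convention:
    Arrow::Int32DataType.new.build_array([1, 2, nil])  # => Arrow::Int32Array
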
diff --git a/ruby/red-arrow/lib/arrow/date32-array-builder.rb b/ruby/red-arrow/lib/arrow/date32-array-builder.rb
deleted file mode 100644
index dedbba8..0000000
--- a/ruby/red-arrow/lib/arrow/date32-array-builder.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Date32ArrayBuilder
-    private
-    UNIX_EPOCH = Date.new(1970, 1, 1)
-    def convert_to_arrow_value(value)
-      value = value.to_date if value.respond_to?(:to_date)
-
-      if value.is_a?(Date)
-        (value - UNIX_EPOCH).to_i
-      else
-        value
-      end
-    end
-  end
-end
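
Date32 stores days since the UNIX epoch, which is exactly what the subtraction
above computes. For example (plain Ruby, no Arrow required):

    require "date"

    unix_epoch = Date.new(1970, 1, 1)
    (Date.new(2021, 4, 18) - unix_epoch).to_i  # => 18735
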
diff --git a/ruby/red-arrow/lib/arrow/date32-array.rb b/ruby/red-arrow/lib/arrow/date32-array.rb
deleted file mode 100644
index 121dbcb..0000000
--- a/ruby/red-arrow/lib/arrow/date32-array.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Date32Array
-    def get_value(i)
-      to_date(get_raw_value(i))
-    end
-
-    private
-    UNIX_EPOCH = 2440588 # Julian Day Number of 1970-01-01
-    def to_date(raw_value)
-      Date.jd(UNIX_EPOCH + raw_value)
-    end
-  end
-end
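
This conversion is the inverse of the Date32 builder's: add the stored day count
to the epoch's Julian Day Number and get a Date back:

    require "date"

    Date.jd(2440588)          # => #<Date: 1970-01-01>
    Date.jd(2440588 + 18735)  # => #<Date: 2021-04-18>
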
diff --git a/ruby/red-arrow/lib/arrow/date64-array-builder.rb b/ruby/red-arrow/lib/arrow/date64-array-builder.rb
deleted file mode 100644
index 6581181..0000000
--- a/ruby/red-arrow/lib/arrow/date64-array-builder.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Date64ArrayBuilder
-    private
-    def convert_to_arrow_value(value)
-      if value.respond_to?(:to_time) and not value.is_a?(::Time)
-        value = value.to_time
-      end
-
-      if value.is_a?(::Time)
-        value.to_i * 1_000 + value.usec / 1_000 # milliseconds since the UNIX epoch
-      else
-        value
-      end
-    end
-  end
-end
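
Date64 stores milliseconds since the UNIX epoch; the expression above combines
the whole seconds with the millisecond part of the microseconds. For example:

    t = Time.utc(1970, 1, 2, 0, 0, 0, 500_000)  # one day plus 500ms after the epoch
    t.to_i * 1_000 + t.usec / 1_000             # => 86_400_500
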
diff --git a/ruby/red-arrow/lib/arrow/date64-array.rb b/ruby/red-arrow/lib/arrow/date64-array.rb
deleted file mode 100644
index 9b8a924..0000000
--- a/ruby/red-arrow/lib/arrow/date64-array.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Date64Array
-    def get_value(i)
-      to_datetime(get_raw_value(i))
-    end
-
-    private
-    def to_datetime(raw_value)
-      ::Time.at(*raw_value.divmod(1_000)).to_datetime
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/decimal128-array-builder.rb b/ruby/red-arrow/lib/arrow/decimal128-array-builder.rb
deleted file mode 100644
index d380ce0..0000000
--- a/ruby/red-arrow/lib/arrow/decimal128-array-builder.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal128ArrayBuilder
-    class << self
-      def build(data_type, values)
-        builder = new(data_type)
-        builder.build(values)
-      end
-    end
-
-    alias_method :append_value_raw, :append_value
-    def append_value(value)
-      append_value_raw(normalize_value(value))
-    end
-
-    alias_method :append_values_raw, :append_values
-    def append_values(values, is_valids=nil)
-      if values.is_a?(::Array)
-        values = values.collect do |value|
-          normalize_value(value)
-        end
-        append_values_raw(values, is_valids)
-      else
-        append_values_packed(values, is_valids)
-      end
-    end
-
-    private
-    def normalize_value(value)
-      case value
-      when String
-        Decimal128.new(value)
-      when Float
-        Decimal128.new(value.to_s)
-      when BigDecimal
-        Decimal128.new(value.to_s)
-      else
-        value
-      end
-    end
-  end
-end
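
Thanks to normalize_value, callers can append String, Float and BigDecimal values
directly; each is converted to an {Arrow::Decimal128} first. A minimal sketch,
assuming a precision-8/scale-2 type as in the data type examples later in this
diff:

    require "bigdecimal"

    data_type = Arrow::Decimal128DataType.new(8, 2)
    builder = Arrow::Decimal128ArrayBuilder.new(data_type)
    builder.append_value("29.45")             # String
    builder.append_value(BigDecimal("1.23"))  # BigDecimal
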
diff --git a/ruby/red-arrow/lib/arrow/decimal128-array.rb b/ruby/red-arrow/lib/arrow/decimal128-array.rb
deleted file mode 100644
index a5ee53b..0000000
--- a/ruby/red-arrow/lib/arrow/decimal128-array.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal128Array
-    def get_value(i)
-      BigDecimal(format_value(i))
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/decimal128-data-type.rb b/ruby/red-arrow/lib/arrow/decimal128-data-type.rb
deleted file mode 100644
index 4b55838..0000000
--- a/ruby/red-arrow/lib/arrow/decimal128-data-type.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal128DataType
-    MAX_PRECISION = max_precision
-
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::Decimal128DataType}.
-    #
-    # @overload initialize(precision, scale)
-    #
-    #   @param precision [Integer] The precision of the decimal data
-    #     type. It's the number of digits including the number of
-    #     digits after the decimal point.
-    #
-    #   @param scale [Integer] The scale of the decimal data
-    #     type. It's the number of digits after the decimal point.
-    #
-    #   @example Create a decimal data type for "XXXXXX.YY" decimal
-    #     Arrow::Decimal128DataType.new(8, 2)
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the decimal data
-    #     type. It must have `:precision` and `:scale` values.
-    #
-    #   @option description [Integer] :precision The precision of the
-    #     decimal data type. It's the number of digits including the
-    #     number of digits after the decimal point.
-    #
-    #   @option description [Integer] :scale The scale of the decimal
-    #     data type. It's the number of digits after the decimal
-    #     point.
-    #
-    #   @example Create a decimal data type for "XXXXXX.YY" decimal
-    #     Arrow::Decimal128DataType.new(precision: 8,
-    #                                   scale: 2)
-    def initialize(*args)
-      n_args = args.size
-      case n_args
-      when 1
-        description = args[0]
-        precision = description[:precision]
-        scale = description[:scale]
-      when 2
-        precision, scale = args
-      else
-        message = "wrong number of arguments (given, #{n_args}, expected 1..2)"
-        raise ArgumentError, message
-      end
-      initialize_raw(precision, scale)
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/decimal128.rb b/ruby/red-arrow/lib/arrow/decimal128.rb
deleted file mode 100644
index bf853ae..0000000
--- a/ruby/red-arrow/lib/arrow/decimal128.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal128
-    alias_method :to_s_raw, :to_s
-
-    # @overload to_s
-    #
-    #   @return [String]
-    #     The string representation of the decimal.
-    #
-    # @overload to_s(scale)
-    #
-    #   @param scale [Integer] The scale of the decimal.
-    #   @return [String]
-    #     The string representation of the decimal including the scale.
-    #
-    # @since 0.13.0
-    def to_s(scale=nil)
-      if scale
-        to_string_scale(scale)
-      else
-        to_s_raw
-      end
-    end
-
-    alias_method :abs!, :abs
-
-    # @since 3.0.0
-    def abs
-      copied = dup
-      copied.abs!
-      copied
-    end
-
-    alias_method :negate!, :negate
-
-    # @since 3.0.0
-    def negate
-      copied = dup
-      copied.negate!
-      copied
-    end
-  end
-end
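
The aliasing above turns the destructive GLib bindings into non-destructive
methods while keeping the originals available under a `!` suffix:

    decimal = Arrow::Decimal128.new("-29.45")
    decimal.abs   # returns a new absolute value; decimal is unchanged
    decimal.abs!  # mutates decimal in place (the raw binding's behavior)
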
diff --git a/ruby/red-arrow/lib/arrow/decimal256-array-builder.rb b/ruby/red-arrow/lib/arrow/decimal256-array-builder.rb
deleted file mode 100644
index fb89ff0..0000000
--- a/ruby/red-arrow/lib/arrow/decimal256-array-builder.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal256ArrayBuilder
-    class << self
-      # @since 3.0.0
-      def build(data_type, values)
-        builder = new(data_type)
-        builder.build(values)
-      end
-    end
-
-    alias_method :append_value_raw, :append_value
-    # @since 3.0.0
-    def append_value(value)
-      append_value_raw(normalize_value(value))
-    end
-
-    alias_method :append_values_raw, :append_values
-    # @since 3.0.0
-    def append_values(values, is_valids=nil)
-      if values.is_a?(::Array)
-        values = values.collect do |value|
-          normalize_value(value)
-        end
-        append_values_raw(values, is_valids)
-      else
-        append_values_packed(values, is_valids)
-      end
-    end
-
-    private
-    def normalize_value(value)
-      case value
-      when String
-        Decimal256.new(value)
-      when Float
-        Decimal256.new(value.to_s)
-      when BigDecimal
-        Decimal256.new(value.to_s)
-      else
-        value
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/decimal256-array.rb b/ruby/red-arrow/lib/arrow/decimal256-array.rb
deleted file mode 100644
index 8c2306d..0000000
--- a/ruby/red-arrow/lib/arrow/decimal256-array.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal256Array
-    # @since 3.0.0
-    def get_value(i)
-      BigDecimal(format_value(i))
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/decimal256-data-type.rb b/ruby/red-arrow/lib/arrow/decimal256-data-type.rb
deleted file mode 100644
index 8264e38..0000000
--- a/ruby/red-arrow/lib/arrow/decimal256-data-type.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal256DataType
-    MAX_PRECISION = max_precision
-
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::Decimal256DataType}.
-    #
-    # @overload initialize(precision, scale)
-    #
-    #   @param precision [Integer] The precision of the decimal data
-    #     type. It's the number of digits including the number of
-    #     digits after the decimal point.
-    #
-    #   @param scale [Integer] The scale of the decimal data
-    #     type. It's the number of digits after the decimal point.
-    #
-    #   @example Create a decimal data type for "XXXXXX.YY" decimal
-    #     Arrow::Decimal256DataType.new(8, 2)
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the decimal data
-    #     type. It must have `:precision` and `:scale` values.
-    #
-    #   @option description [Integer] :precision The precision of the
-    #     decimal data type. It's the number of digits including the
-    #     number of digits after the decimal point.
-    #
-    #   @option description [Integer] :scale The scale of the decimal
-    #     data type. It's the number of digits after the decimal
-    #     point.
-    #
-    #   @example Create a decimal data type for "XXXXXX.YY" decimal
-    #     Arrow::Decimal256DataType.new(precision: 8,
-    #                                   scale: 2)
-    #
-    # @since 3.0.0
-    def initialize(*args)
-      n_args = args.size
-      case n_args
-      when 1
-        description = args[0]
-        precision = description[:precision]
-        scale = description[:scale]
-      when 2
-        precision, scale = args
-      else
-        message = "wrong number of arguments (given, #{n_args}, expected 1..2)"
-        raise ArgumentError, message
-      end
-      initialize_raw(precision, scale)
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/decimal256.rb b/ruby/red-arrow/lib/arrow/decimal256.rb
deleted file mode 100644
index 1a7097a..0000000
--- a/ruby/red-arrow/lib/arrow/decimal256.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Decimal256
-    alias_method :to_s_raw, :to_s
-
-    # @overload to_s
-    #
-    #   @return [String]
-    #     The string representation of the decimal.
-    #
-    # @overload to_s(scale)
-    #
-    #   @param scale [Integer] The scale of the decimal.
-    #   @return [String]
-    #     The string representation of the decimal including the scale.
-    #
-    # @since 3.0.0
-    def to_s(scale=nil)
-      if scale
-        to_string_scale(scale)
-      else
-        to_s_raw
-      end
-    end
-
-    alias_method :abs!, :abs
-
-    # @since 3.0.0
-    def abs
-      copied = dup
-      copied.abs!
-      copied
-    end
-
-    alias_method :negate!, :negate
-
-    # @since 3.0.0
-    def negate
-      copied = dup
-      copied.negate!
-      copied
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/dense-union-data-type.rb b/ruby/red-arrow/lib/arrow/dense-union-data-type.rb
deleted file mode 100644
index 6d2bf5e..0000000
--- a/ruby/red-arrow/lib/arrow/dense-union-data-type.rb
+++ /dev/null
@@ -1,90 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class DenseUnionDataType
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::DenseUnionDataType}.
-    #
-    # @overload initialize(fields, type_codes)
-    #
-    #   @param fields [::Array<Arrow::Field, Hash>] The fields of the
-    #     dense union data type. You can mix {Arrow::Field} objects and
-    #     field descriptions.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @param type_codes [::Array<Integer>] The IDs that indicate
-    #     the corresponding fields.
-    #
-    #   @example Create a dense union data type for `{2: visible, 9: count}`
-    #     fields = [
-    #       Arrow::Field.new("visible", :boolean),
-    #       {
-    #         name: "count",
-    #         type: :int32,
-    #       },
-    #     ]
-    #     Arrow::DenseUnionDataType.new(fields, [2, 9])
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the dense union
-    #     data type. It must have `:fields` and `:type_codes` values.
-    #
-    #   @option description [::Array<Arrow::Field, Hash>] :fields The
-    #     fields of the dense union data type. You can mix
-    #     {Arrow::Field} objects and field descriptions.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @option description [::Array<Integer>] :type_codes The IDs
-    #     that indicate the corresponding fields.
-    #
-    #   @example Create a dense union data type for `{2: visible, 9: count}`
-    #     fields = [
-    #       Arrow::Field.new("visible", :boolean),
-    #       {
-    #         name: "count",
-    #         type: :int32,
-    #       },
-    #     ]
-    #     Arrow::DenseUnionDataType.new(fields: fields,
-    #                                   type_codes: [2, 9])
-    def initialize(*args)
-      n_args = args.size
-      case n_args
-      when 1
-        description = args[0]
-        fields = description[:fields]
-        type_codes = description[:type_codes]
-      when 2
-        fields, type_codes = args
-      else
-        message = "wrong number of arguments (given, #{n_args}, expected 1..2)"
-        raise ArgumentError, message
-      end
-      fields = fields.collect do |field|
-        field = Field.new(field) unless field.is_a?(Field)
-        field
-      end
-      initialize_raw(fields, type_codes)
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/dictionary-array.rb b/ruby/red-arrow/lib/arrow/dictionary-array.rb
deleted file mode 100644
index 70591ab..0000000
--- a/ruby/red-arrow/lib/arrow/dictionary-array.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class DictionaryArray
-    def get_value(i)
-      dictionary[indices[i]]
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/dictionary-data-type.rb b/ruby/red-arrow/lib/arrow/dictionary-data-type.rb
deleted file mode 100644
index 8396e31..0000000
--- a/ruby/red-arrow/lib/arrow/dictionary-data-type.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class DictionaryDataType
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::DictionaryDataType}.
-    #
-    # @overload initialize(index_data_type, value_data_type, ordered)
-    #
-    #   @param index_data_type [Arrow::DataType, Hash, String, Symbol]
-    #     The index data type of the dictionary data type. It must be
-    #     a signed integer data type. The available signed integer
-    #     data types are:
-    #
-    #       * Arrow::Int8DataType
-    #       * Arrow::Int16DataType
-    #       * Arrow::Int32DataType
-    #       * Arrow::Int64DataType
-    #
-    #     You can specify the data type as a description with a `Hash`.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #   @param value_data_type [Arrow::DataType, Hash, String, Symbol]
-    #     The value data type of the dictionary data type.
-    #
-    #     You can specify the data type as a description with a `Hash`.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #   @param ordered [Boolean] Whether dictionary contents are
-    #     ordered or not.
-    #
-    #   @example Create a dictionary data type for `{0: "Hello", 1: "World"}`
-    #     index_data_type = :int8
-    #     value_data_type = :string
-    #     ordered = true
-    #     Arrow::DictionaryDataType.new(index_data_type,
-    #                                   value_data_type,
-    #                                   ordered)
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the dictionary
-    #     data type. It must have `:index_data_type`, `:value_data_type`
-    #     and `:ordered` values.
-    #
-    #   @option description [Arrow::DataType, Hash, String, Symbol]
-    #     :index_data_type The index data type of the dictionary data
-    #     type. It must be a signed integer data type. The available
-    #     signed integer data types are:
-    #
-    #       * Arrow::Int8DataType
-    #       * Arrow::Int16DataType
-    #       * Arrow::Int32DataType
-    #       * Arrow::Int64DataType
-    #
-    #     You can specify the data type as a description with a `Hash`.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #   @option description [Arrow::DataType, Hash, String, Symbol]
-    #     :value_data_type
-    #     The value data type of the dictionary data type.
-    #
-    #     You can specify the data type as a description with a `Hash`.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #   @option description [Boolean] :ordered Whether dictionary
-    #     contents are ordered or not.
-    #
-    #   @example Create a dictionary data type for `{0: "Hello", 1: "World"}`
-    #     Arrow::DictionaryDataType.new(index_data_type: :int8,
-    #                                   value_data_type: :string,
-    #                                   ordered: true)
-    def initialize(*args)
-      n_args = args.size
-      case n_args
-      when 1
-        description = args[0]
-        index_data_type = description[:index_data_type]
-        value_data_type = description[:value_data_type]
-        ordered = description[:ordered]
-      when 3
-        index_data_type, value_data_type, ordered = args
-      else
-        message = "wrong number of arguments (given, #{n_args}, expected 1 or 3)"
-        raise ArgumentError, message
-      end
-      index_data_type = DataType.resolve(index_data_type)
-      value_data_type = DataType.resolve(value_data_type)
-      initialize_raw(index_data_type, value_data_type, ordered)
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/field-containable.rb b/ruby/red-arrow/lib/arrow/field-containable.rb
deleted file mode 100644
index e4dbf4e..0000000
--- a/ruby/red-arrow/lib/arrow/field-containable.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  module FieldContainable
-    def find_field(name_or_index)
-      case name_or_index
-      when String, Symbol
-        name = name_or_index
-        get_field_by_name(name)
-      when Integer
-        index = name_or_index
-        # A negative index counts from the end
-        index += n_fields if index < 0
-        return nil if index < 0 or index >= n_fields
-        get_field(index)
-      else
-        message = "field name or index must be String, Symbol or Integer"
-        message << ": <#{name_or_index.inspect}>"
-        raise ArgumentError, message
-      end
-    end
-  end
-end
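
With this module, containers can look up a field by name or by index, with
negative indexes counting from the end and out-of-range indexes returning nil.
A sketch, assuming {Arrow::Schema} includes the module and accepts field
descriptions in its initializer, as elsewhere in Red Arrow:

    schema = Arrow::Schema.new([
      {name: "visible", type: :boolean},
      {name: "count", type: :int32},
    ])
    schema.find_field("visible")  # by name
    schema.find_field(0)          # by index
    schema.find_field(-1)         # the "count" field
    schema.find_field(2)          # => nil (out of range)
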
diff --git a/ruby/red-arrow/lib/arrow/field.rb b/ruby/red-arrow/lib/arrow/field.rb
deleted file mode 100644
index e439cb9..0000000
--- a/ruby/red-arrow/lib/arrow/field.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Field
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::Field}.
-    #
-    # @overload initialize(name, data_type)
-    #
-    #   @param name [String, Symbol] The name of the field.
-    #
-    #   @param data_type [Arrow::DataType, Hash, String, Symbol] The
-    #     data type of the field.
-    #
-    #     You can specify the data type as a description with a `Hash`.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #   @example Create a field with {Arrow::DataType}s
-    #     Arrow::Field.new("visible", Arrow::BooleanDataType.new)
-    #
-    #   @example Create a field with data type description
-    #     Arrow::Field.new("visible", :boolean)
-    #
-    #   @example Create a field with name as `Symbol`
-    #     Arrow::Field.new(:visible, :boolean)
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the field.
-    #
-    #     A field description is a raw `Hash`. It must have `:name`
-    #     and `:data_type` values. `:name` is the name of the field.
-    #     `:data_type` is the data type of the field. You can use
-    #     {Arrow::DataType} or a data type description as the
-    #     `:data_type` value.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #     There is a shortcut for convenience. If the field description
-    #     doesn't have `:data_type`, all keys except `:name` are
-    #     processed as a data type description. For example, the
-    #     following field descriptions are the same:
-    #
-    #     ```ruby
-    #     {name: "visible", data_type: {type: :boolean}}
-    #     {name: "visible", type: :boolean} # Shortcut version
-    #     ```
-    #
-    #   @option description [String, Symbol] :name The name of the field.
-    #
-    #   @option description [Arrow::DataType, Hash] :data_type The
-    #     data type of the field. You can specify the data type
-    #     description with a `Hash`.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type
-    #     description.
-    #
-    #   @example Create a field with {Arrow::DataType}s
-    #     Arrow::Field.new(name: "visible",
-    #                      data_type: Arrow::BooleanDataType.new)
-    #
-    #   @example Create a field with data type description
-    #     Arrow::Field.new(name: "visible", data_type: {type: :boolean}
-    #
-    #   @example Create a field with shortcut form
-    #     Arrow::Field.new(name: "visible", type: :boolean)
-    def initialize(*args)
-      n_args = args.size
-      case n_args
-      when 1
-        description = args[0]
-        name = nil
-        data_type = nil
-        data_type_description = {}
-        description.each do |key, value|
-          key = key.to_sym
-          case key
-          when :name
-            name = value
-          when :data_type
-            data_type = DataType.resolve(value)
-          else
-            data_type_description[key] = value
-          end
-        end
-        data_type ||= DataType.resolve(data_type_description)
-      when 2
-        name = args[0]
-        data_type = DataType.resolve(args[1])
-      else
-        message = "wrong number of arguments (given #{n_args}, expected 1..2)"
-        raise ArgumentError, message
-      end
-
-      initialize_raw(name, data_type)
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/file-output-stream.rb b/ruby/red-arrow/lib/arrow/file-output-stream.rb
deleted file mode 100644
index f39ad14..0000000
--- a/ruby/red-arrow/lib/arrow/file-output-stream.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class FileOutputStream
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-    def initialize(path, options={})
-      append = nil
-      case options
-      when true, false
-        append = options
-      when Hash
-        append = options[:append]
-      end
-      append = false if append.nil?
-      initialize_raw(path, append)
-    end
-  end
-end
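
The wrapper accepts either an options Hash or a bare boolean for backward
compatibility, defaulting to truncating the file:

    Arrow::FileOutputStream.new("data.arrow", append: true)
    Arrow::FileOutputStream.new("data.arrow", true)   # same, positional form
    Arrow::FileOutputStream.new("data.arrow")         # append defaults to false
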
diff --git a/ruby/red-arrow/lib/arrow/fixed-size-binary-array-builder.rb b/ruby/red-arrow/lib/arrow/fixed-size-binary-array-builder.rb
deleted file mode 100644
index 516d814..0000000
--- a/ruby/red-arrow/lib/arrow/fixed-size-binary-array-builder.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class FixedSizeBinaryArrayBuilder
-    class << self
-      # @since 3.0.0
-      def build(data_type, values)
-        builder = new(data_type)
-        builder.build(values)
-      end
-    end
-
-    alias_method :append_values_raw, :append_values
-    # @since 3.0.0
-    def append_values(values, is_valids=nil)
-      if values.is_a?(::Array)
-        append_values_raw(values, is_valids)
-      else
-        append_values_packed(values, is_valids)
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/fixed-size-binary-array.rb b/ruby/red-arrow/lib/arrow/fixed-size-binary-array.rb
deleted file mode 100644
index 37c121d..0000000
--- a/ruby/red-arrow/lib/arrow/fixed-size-binary-array.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class FixedSizeBinaryArray
-    alias_method :get_value_raw, :get_value
-    # @since 3.0.0
-    def get_value(i)
-      get_value_raw(i).to_s
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/generic-filterable.rb b/ruby/red-arrow/lib/arrow/generic-filterable.rb
deleted file mode 100644
index 50a7914..0000000
--- a/ruby/red-arrow/lib/arrow/generic-filterable.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  module GenericFilterable
-    class << self
-      def included(base)
-        base.__send__(:alias_method, :filter_raw, :filter)
-        base.__send__(:alias_method, :filter, :filter_generic)
-      end
-    end
-
-    def filter_generic(filter, options=nil)
-      case filter
-      when ::Array
-        filter_raw(BooleanArray.new(filter), options)
-      when ChunkedArray
-        if respond_to?(:filter_chunked_array)
-          filter_chunked_array(filter, options)
-        else
-          # TODO: Implement this in C++
-          filter_raw(filter.pack, options)
-        end
-      else
-        filter_raw(filter, options)
-      end
-    end
-  end
-end
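
After inclusion, filter transparently accepts a plain Ruby Array (wrapped in an
{Arrow::BooleanArray}) or a {Arrow::ChunkedArray} in addition to a boolean
array. A sketch, assuming {Arrow::Array} includes this module as elsewhere in
Red Arrow:

    array = Arrow::Int32Array.new([1, 2, 3])
    array.filter([true, false, true])  # plain Array is wrapped automatically
    array.filter(Arrow::BooleanArray.new([true, false, true]))
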
diff --git a/ruby/red-arrow/lib/arrow/generic-takeable.rb b/ruby/red-arrow/lib/arrow/generic-takeable.rb
deleted file mode 100644
index f32b43f..0000000
--- a/ruby/red-arrow/lib/arrow/generic-takeable.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  module GenericTakeable
-    class << self
-      def included(base)
-        base.__send__(:alias_method, :take_raw, :take)
-        base.__send__(:alias_method, :take, :take_generic)
-      end
-    end
-
-    def take_generic(indices)
-      case indices
-      when ::Array
-        take_raw(IntArrayBuilder.build(indices))
-      when ChunkedArray
-        take_chunked_array(indices)
-      else
-        take_raw(indices)
-      end
-    end
-  end
-end
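
take gets the same treatment: a plain Array of indices is built into an integer
array via IntArrayBuilder before dispatching. Under the same assumption that
{Arrow::Array} includes the module:

    array = Arrow::Int32Array.new([10, 20, 30])
    array.take([2, 0])  # => values 30, 10
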
diff --git a/ruby/red-arrow/lib/arrow/group.rb b/ruby/red-arrow/lib/arrow/group.rb
deleted file mode 100644
index 568e0e8..0000000
--- a/ruby/red-arrow/lib/arrow/group.rb
+++ /dev/null
@@ -1,172 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  # Experimental
-  #
-  # TODO: Most of this code should be implemented in Apache Arrow C++.
-  class Group
-    def initialize(table, keys)
-      @table = table
-      @keys = keys
-    end
-
-    def count
-      key_names = @keys.collect(&:to_s)
-      target_columns = @table.columns.reject do |column|
-        key_names.include?(column.name)
-      end
-      aggregate(target_columns) do |column, indexes|
-        n = 0
-        indexes.each do |index|
-          n += 1 unless column.null?(index)
-        end
-        n
-      end
-    end
-
-    def sum
-      key_names = @keys.collect(&:to_s)
-      target_columns = @table.columns.reject do |column|
-        key_names.include?(column.name) or
-          not column.data_type.is_a?(NumericDataType)
-      end
-      aggregate(target_columns) do |column, indexes|
-        n = 0
-        indexes.each do |index|
-          value = column[index]
-          n += value unless value.nil?
-        end
-        n
-      end
-    end
-
-    def average
-      key_names = @keys.collect(&:to_s)
-      target_columns = @table.columns.reject do |column|
-        key_names.include?(column.name) or
-          not column.data_type.is_a?(NumericDataType)
-      end
-      aggregate(target_columns) do |column, indexes|
-        average = 0.0
-        n = 0
-        indexes.each do |index|
-          value = column[index]
-          unless value.nil?
-            n += 1
-            average += (value - average) / n
-          end
-        end
-        average
-      end
-    end
-
-    def min
-      key_names = @keys.collect(&:to_s)
-      target_columns = @table.columns.reject do |column|
-        key_names.include?(column.name) or
-          not column.data_type.is_a?(NumericDataType)
-      end
-      aggregate(target_columns) do |column, indexes|
-        n = nil
-        indexes.each do |index|
-          value = column[index]
-          next if value.nil?
-          n ||= value
-          n = value if value < n
-        end
-        n
-      end
-    end
-
-    def max
-      key_names = @keys.collect(&:to_s)
-      target_columns = @table.columns.reject do |column|
-        key_names.include?(column.name) or
-          not column.data_type.is_a?(NumericDataType)
-      end
-      aggregate(target_columns) do |column, indexes|
-        n = nil
-        indexes.each do |index|
-          value = column[index]
-          next if value.nil?
-          n ||= value
-          n = value if value > n
-        end
-        n
-      end
-    end
-
-    private
-    def aggregate(target_columns)
-      sort_values = @table.n_rows.times.collect do |i|
-        key_values = @keys.collect do |key|
-          @table[key][i]
-        end
-        [key_values, i]
-      end
-      sorted = sort_values.sort_by do |key_values, i|
-        key_values
-      end
-
-      grouped_keys = []
-      aggregated_arrays_raw = []
-      target_columns.size.times do
-        aggregated_arrays_raw << []
-      end
-      indexes = []
-      sorted.each do |key_values, i|
-        if grouped_keys.empty?
-          grouped_keys << key_values
-          indexes.clear
-          indexes << i
-        else
-          if key_values == grouped_keys.last
-            indexes << i
-          else
-            grouped_keys << key_values
-            target_columns.each_with_index do |column, j|
-              aggregated_arrays_raw[j] << yield(column, indexes)
-            end
-            indexes.clear
-            indexes << i
-          end
-        end
-      end
-      target_columns.each_with_index do |column, j|
-        aggregated_arrays_raw[j] << yield(column, indexes)
-      end
-
-      grouped_key_arrays_raw = grouped_keys.transpose
-      fields = []
-      arrays = []
-      @keys.each_with_index do |key, i|
-        key_column = @table[key]
-        key_column_array_raw = grouped_key_arrays_raw[i]
-        key_column_array = key_column.data_type.build_array(key_column_array_raw)
-        fields << key_column.field
-        arrays << key_column_array
-      end
-      target_columns.each_with_index do |column, i|
-        array = ArrayBuilder.build(aggregated_arrays_raw[i])
-        arrays << array
-        fields << Field.new(column.field.name, array.value_data_type)
-      end
-      Table.new(fields, arrays)
-    end
-  end
-end
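For reference, a usage sketch of the Group aggregations removed above. The
`Arrow::Table#group` entry point is assumed here; it lives in table.rb, not
in this file.

    table = Arrow::Table.new("category" => ["a", "b", "a", "b"],
                             "price"    => [10, 20, nil, 40])
    group = table.group("category")  # assumed Table#group => Arrow::Group
    group.count    # non-null "price" values per category: a => 1, b => 2
    group.sum      # a => 10, b => 60
    group.average  # a => 10.0, b => 30.0 (the streaming mean shown above)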
diff --git a/ruby/red-arrow/lib/arrow/list-array-builder.rb b/ruby/red-arrow/lib/arrow/list-array-builder.rb
deleted file mode 100644
index d889c8a..0000000
--- a/ruby/red-arrow/lib/arrow/list-array-builder.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class ListArrayBuilder
-    class << self
-      def build(data_type, values)
-        builder = new(data_type)
-        builder.build(values)
-      end
-    end
-
-    alias_method :append_value_raw, :append_value
-
-    # @overload append_value
-    #
-    #   Starts appending a list record. You also need to append the
-    #   list value via {#value_builder}.

-    #
-    # @overload append_value(list)
-    #
-    #   Appends a list record including list value.
-    #
-    #   @param value [nil, ::Array] The list value of the record.
-    #
-    #     If this is `nil`, the list record is null.
-    #
-    #     If this is `Array`, it's the list value of the record.
-    #
-    # @since 0.12.0
-    def append_value(*args)
-      n_args = args.size
-
-      case n_args
-      when 0
-        append_value_raw
-      when 1
-        value = args[0]
-        case value
-        when nil
-          append_null
-        when ::Array
-          append_value_raw
-          @value_builder ||= value_builder
-          @value_builder.append(*value)
-        else
-          message = "list value must be nil or Array: #{value.inspect}"
-          raise ArgumentError, message
-        end
-      else
-        message = "wrong number of arguments (given #{n_args}, expected 0..1)"
-        raise ArgumentError, message
-      end
-    end
-
-    def append_values(lists, is_valids=nil)
-      if is_valids
-        is_valids.each_with_index do |is_valid, i|
-          if is_valid
-            append_value(lists[i])
-          else
-            append_null
-          end
-        end
-      else
-        lists.each do |list|
-          append_value(list)
-        end
-      end
-    end
-
-    # @since 0.12.0
-    def append(*values)
-      if values.empty?
-        # For backward compatibility
-        append_value
-      else
-        super
-      end
-    end
-  end
-end
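A build sketch for the removed ListArrayBuilder: nil entries become null
list records and Array entries are delegated to the value builder. The
instance #build method is assumed to come from the base ArrayBuilder.

    data_type = Arrow::ListDataType.new(name: "value", type: :int8)
    array = Arrow::ListArrayBuilder.build(data_type, [[1, 2], nil, [3]])
    # => a list<int8> array: [[1, 2], null, [3]]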
diff --git a/ruby/red-arrow/lib/arrow/list-data-type.rb b/ruby/red-arrow/lib/arrow/list-data-type.rb
deleted file mode 100644
index cfcdd2a..0000000
--- a/ruby/red-arrow/lib/arrow/list-data-type.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class ListDataType
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::ListDataType}.
-    #
-    # @overload initialize(field)
-    #
-    #   @param field [Arrow::Field, Hash] The field of the list data
-    #     type. You can also specify the field description as a `Hash`.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @example Create a list data type with {Arrow::Field}
-    #     visible_field = Arrow::Field.new("visible", :boolean)
-    #     Arrow::ListDataType.new(visible_field)
-    #
-    #   @example Create a list data type with field description
-    #     Arrow::ListDataType.new(name: "visible", type: :boolean)
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the list data
-    #     type. It must have `:field` value.
-    #
-    #   @option description [Arrow::Field, Hash] :field The field of
-    #     the list data type. You can also specify the field
-    #     description as a `Hash`.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @example Create a list data type with {Arrow::Field}
-    #     visible_field = Arrow::Field.new("visible", :boolean)
-    #     Arrow::ListDataType.new(field: visible_field)
-    #
-    #   @example Create a list data type with field description
-    #     Arrow::ListDataType.new(field: {name: "visible", type: :boolean})
-    #
-    # @overload initialize(data_type)
-    #
-    #   @param data_type [Arrow::DataType, String, Symbol,
-    #     ::Array<String>, ::Array<Symbol>, Hash] The element data
-    #     type of the list data type. A field is created with the
-    #     default name `"item"` from the data type automatically.
-    #
-    #     See {Arrow::DataType.resolve} for how to specify a data type.
-    #
-    #   @example Create a list data type with {Arrow::DataType}
-    #     Arrow::ListDataType.new(Arrow::BooleanDataType.new)
-    #
-    #   @example Create a list data type with data type name as String
-    #     Arrow::ListDataType.new("boolean")
-    #
-    #   @example Create a list data type with data type name as Symbol
-    #     Arrow::ListDataType.new(:boolean)
-    #
-    #   @example Create a list data type with data type as Array
-    #     Arrow::ListDataType.new([:time32, :milli])
-    def initialize(arg)
-      data_type = resolve_data_type(arg)
-      if data_type
-        field = Field.new(default_field_name, data_type)
-      else
-        field = resolve_field(arg)
-      end
-      initialize_raw(field)
-    end
-
-    private
-    def resolve_data_type(arg)
-      case arg
-      when DataType, String, Symbol, ::Array
-        DataType.resolve(arg)
-      when Hash
-        return nil if arg[:name]
-        return nil unless arg[:type]
-        DataType.resolve(arg)
-      else
-        nil
-      end
-    end
-
-    def default_field_name
-      "item"
-    end
-
-    def resolve_field(arg)
-      if arg.is_a?(Hash) and arg.key?(:field)
-        description = arg
-        arg = description[:field]
-      end
-      if arg.is_a?(Hash)
-        field_description = arg
-        Field.new(field_description)
-      else
-        arg
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/loader.rb b/ruby/red-arrow/lib/arrow/loader.rb
deleted file mode 100644
index 81a4c20..0000000
--- a/ruby/red-arrow/lib/arrow/loader.rb
+++ /dev/null
@@ -1,172 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow/block-closable"
-
-module Arrow
-  class Loader < GObjectIntrospection::Loader
-    class << self
-      def load
-        super("Arrow", Arrow)
-      end
-    end
-
-    private
-    def post_load(repository, namespace)
-      require_libraries
-      require_extension_library
-    end
-
-    def require_libraries
-      require "arrow/column-containable"
-      require "arrow/field-containable"
-      require "arrow/generic-filterable"
-      require "arrow/generic-takeable"
-      require "arrow/record-containable"
-
-      require "arrow/array"
-      require "arrow/array-builder"
-      require "arrow/bigdecimal-extension"
-      require "arrow/buffer"
-      require "arrow/chunked-array"
-      require "arrow/column"
-      require "arrow/compression-type"
-      require "arrow/csv-loader"
-      require "arrow/csv-read-options"
-      require "arrow/data-type"
-      require "arrow/date32-array"
-      require "arrow/date32-array-builder"
-      require "arrow/date64-array"
-      require "arrow/date64-array-builder"
-      require "arrow/decimal128"
-      require "arrow/decimal128-array"
-      require "arrow/decimal128-array-builder"
-      require "arrow/decimal128-data-type"
-      require "arrow/decimal256"
-      require "arrow/decimal256-array"
-      require "arrow/decimal256-array-builder"
-      require "arrow/decimal256-data-type"
-      require "arrow/dense-union-data-type"
-      require "arrow/dictionary-array"
-      require "arrow/dictionary-data-type"
-      require "arrow/field"
-      require "arrow/file-output-stream"
-      require "arrow/fixed-size-binary-array"
-      require "arrow/fixed-size-binary-array-builder"
-      require "arrow/group"
-      require "arrow/list-array-builder"
-      require "arrow/list-data-type"
-      require "arrow/null-array"
-      require "arrow/null-array-builder"
-      require "arrow/path-extension"
-      require "arrow/record"
-      require "arrow/record-batch"
-      require "arrow/record-batch-builder"
-      require "arrow/record-batch-file-reader"
-      require "arrow/record-batch-iterator"
-      require "arrow/record-batch-stream-reader"
-      require "arrow/rolling-window"
-      require "arrow/schema"
-      require "arrow/slicer"
-      require "arrow/sort-key"
-      require "arrow/sort-options"
-      require "arrow/sparse-union-data-type"
-      require "arrow/struct-array"
-      require "arrow/struct-array-builder"
-      require "arrow/struct-data-type"
-      require "arrow/table"
-      require "arrow/table-formatter"
-      require "arrow/table-list-formatter"
-      require "arrow/table-table-formatter"
-      require "arrow/table-loader"
-      require "arrow/table-saver"
-      require "arrow/tensor"
-      require "arrow/time"
-      require "arrow/time32-array"
-      require "arrow/time32-array-builder"
-      require "arrow/time32-data-type"
-      require "arrow/time64-array"
-      require "arrow/time64-array-builder"
-      require "arrow/time64-data-type"
-      require "arrow/timestamp-array"
-      require "arrow/timestamp-array-builder"
-      require "arrow/timestamp-data-type"
-      require "arrow/writable"
-    end
-
-    def require_extension_library
-      require "arrow.so"
-    end
-
-    def load_object_info(info)
-      super
-
-      klass = @base_module.const_get(rubyish_class_name(info))
-      if klass.method_defined?(:close)
-        klass.extend(BlockClosable)
-      end
-    end
-
-    def load_method_info(info, klass, method_name)
-      case klass.name
-      when /Array\z/
-        case method_name
-        when "values"
-          method_name = "values_raw"
-        end
-      end
-
-      case klass.name
-      when /Builder\z/
-        case method_name
-        when "append"
-          return
-        else
-          super
-        end
-      when "Arrow::StringArray"
-        case method_name
-        when "get_value"
-          method_name = "get_raw_value"
-        when "get_string"
-          method_name = "get_value"
-        end
-        super(info, klass, method_name)
-      when "Arrow::Date32Array",
-           "Arrow::Date64Array",
-           "Arrow::Decimal128Array",
-           "Arrow::Decimal256Array",
-           "Arrow::Time32Array",
-           "Arrow::Time64Array",
-           "Arrow::TimestampArray"
-        case method_name
-        when "get_value"
-          method_name = "get_raw_value"
-        end
-        super(info, klass, method_name)
-      when "Arrow::Decimal128", "Arrow::Decimal256"
-        case method_name
-        when "copy"
-          method_name = "dup"
-        end
-        super(info, klass, method_name)
-      else
-        super
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/null-array-builder.rb b/ruby/red-arrow/lib/arrow/null-array-builder.rb
deleted file mode 100644
index 26e58cc..0000000
--- a/ruby/red-arrow/lib/arrow/null-array-builder.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class NullArrayBuilder
-    class << self
-      def buildable?(args)
-        super and not (args.size == 1 and args[0].is_a?(Integer))
-      end
-    end
-  end
-end
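The buildable? guard above keeps builder auto-detection from treating a
single Integer as null-array data, since a bare Integer is better read as a
length. A sketch of the two paths, assuming ArrayBuilder.build dispatches on
buildable? as the name suggests:

    Arrow::ArrayBuilder.build([nil, nil])  # builds via NullArrayBuilder
    Arrow::NullArray.new(3)                # a length-3 null array directly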
diff --git a/ruby/red-arrow/lib/arrow/null-array.rb b/ruby/red-arrow/lib/arrow/null-array.rb
deleted file mode 100644
index 7426bb3..0000000
--- a/ruby/red-arrow/lib/arrow/null-array.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class NullArray
-    def get_value(i)
-      nil
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/path-extension.rb b/ruby/red-arrow/lib/arrow/path-extension.rb
deleted file mode 100644
index 7d32672..0000000
--- a/ruby/red-arrow/lib/arrow/path-extension.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class PathExtension
-    def initialize(path)
-      @path = path
-    end
-
-    def extract
-      basename = ::File.basename(@path)
-      components = basename.split(".")
-      return {} if components.size == 1
-
-      extension = components.last.downcase
-      if components.size > 2
-        compression = CompressionType.resolve_extension(extension)
-        if compression
-          {
-            format: components[-2].downcase,
-            compression: compression,
-          }
-        else
-          {format: extension}
-        end
-      else
-        {format: extension}
-      end
-    end
-  end
-end
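A sketch of what the removed PathExtension#extract returns; the exact
compression value comes from CompressionType.resolve_extension, defined in
compression-type.rb:

    Arrow::PathExtension.new("data.csv").extract     # => {format: "csv"}
    Arrow::PathExtension.new("data.csv.gz").extract  # => {format: "csv",
                                                     #     compression: <gzip>}
    Arrow::PathExtension.new("README").extract       # => {}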
diff --git a/ruby/red-arrow/lib/arrow/raw-table-converter.rb b/ruby/red-arrow/lib/arrow/raw-table-converter.rb
deleted file mode 100644
index 41d331f..0000000
--- a/ruby/red-arrow/lib/arrow/raw-table-converter.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class RawTableConverter
-    attr_reader :n_rows
-    attr_reader :schema
-    attr_reader :values
-    def initialize(raw_table)
-      @raw_table = raw_table
-      convert
-    end
-
-    private
-    def convert
-      if @raw_table.is_a?(::Array) and @raw_table[0].is_a?(Column)
-        fields = @raw_table.collect(&:field)
-        @schema = Schema.new(fields)
-        @values = @raw_table.collect(&:data)
-      else
-        fields = []
-        @values = []
-        @raw_table.each do |name, array|
-          array = ArrayBuilder.build(array) if array.is_a?(::Array)
-          fields << Field.new(name.to_s, array.value_data_type)
-          @values << array
-        end
-        @schema = Schema.new(fields)
-      end
-      @n_rows = @values[0].length
-    end
-  end
-end
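The converter above normalizes the two raw-table forms accepted by Table and
RecordBatch: an Array of Arrow::Column, or a Hash mapping names to arrays,
where plain Ruby Arrays are built with ArrayBuilder.build. A sketch of the
Hash form:

    converter = Arrow::RawTableConverter.new("count" => [1, 2, 3])
    converter.n_rows  # => 3
    converter.schema  # => a schema with a single "count" field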
diff --git a/ruby/red-arrow/lib/arrow/record-batch-builder.rb b/ruby/red-arrow/lib/arrow/record-batch-builder.rb
deleted file mode 100644
index dc20312..0000000
--- a/ruby/red-arrow/lib/arrow/record-batch-builder.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class RecordBatchBuilder
-    class << self
-      # @since 0.12.0
-      def build(schema, data)
-        builder = new(schema)
-        builder.append(data)
-        builder.flush
-      end
-    end
-
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-    def initialize(schema)
-      unless schema.is_a?(Schema)
-        schema = Schema.new(schema)
-      end
-      initialize_raw(schema)
-      @name_to_index = {}
-      schema.fields.each_with_index do |field, i|
-        @name_to_index[field.name] = i
-      end
-    end
-
-    # @since 0.12.0
-    def [](name_or_index)
-      case name_or_index
-      when String, Symbol
-        name = name_or_index
-        self[resolve_name(name)]
-      else
-        index = name_or_index
-        column_builders[index]
-      end
-    end
-
-    # @since 0.12.0
-    def append(*values)
-      values.each do |value|
-        case value
-        when Hash
-          append_columns(value)
-        else
-          append_records(value)
-        end
-      end
-    end
-
-    # @since 0.12.0
-    def append_records(records)
-      n = n_columns
-      columns = n.times.collect do
-        []
-      end
-      records.each_with_index do |record, nth_record|
-        case record
-        when nil
-        when Hash
-          record.each do |name, value|
-            nth_column = resolve_name(name)
-            next if nth_column.nil?
-            columns[nth_column] << value
-          end
-        else
-          record.each_with_index do |value, nth_column|
-            columns[nth_column] << value
-          end
-        end
-        columns.each do |column|
-          column << nil if column.size != (nth_record + 1)
-        end
-      end
-      columns.each_with_index do |column, i|
-        self[i].append(*column)
-      end
-    end
-
-    # @since 0.12.0
-    def append_columns(columns)
-      columns.each do |name, values|
-        self[name].append(*values)
-      end
-    end
-
-    # @since 0.13.0
-    def column_builders
-      @column_builders ||= n_columns.times.collect do |i|
-        get_column_builder(i)
-      end
-    end
-
-    private
-    def resolve_name(name)
-      @name_to_index[name.to_s]
-    end
-  end
-end
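A usage sketch of the removed RecordBatchBuilder: records may be Arrays
(positional) or Hashes (by column name), and short records are padded with
nil by #append_records above.

    schema = Arrow::Schema.new(visible: :boolean, count: :int32)
    record_batch = Arrow::RecordBatchBuilder.build(schema,
                                                   [[true, 1],
                                                    {"count" => 2}])
    # Two records; "visible" is null in the second one.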
diff --git a/ruby/red-arrow/lib/arrow/record-batch-file-reader.rb b/ruby/red-arrow/lib/arrow/record-batch-file-reader.rb
deleted file mode 100644
index 86a757e..0000000
--- a/ruby/red-arrow/lib/arrow/record-batch-file-reader.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class RecordBatchFileReader
-    include Enumerable
-
-    def each
-      n_record_batches.times do |i|
-        yield(get_record_batch(i))
-      end
-    end
-  end
-end
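With Enumerable mixed in above, reading an Arrow file becomes a plain
iteration. The input-stream class and the block form of .open, provided by
the BlockClosable extension, are assumed from the rest of red-arrow:

    Arrow::MemoryMappedInputStream.open("/tmp/batches.arrow") do |input|
      reader = Arrow::RecordBatchFileReader.new(input)
      reader.each do |record_batch|
        p record_batch.n_rows
      end
    end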
diff --git a/ruby/red-arrow/lib/arrow/record-batch-iterator.rb b/ruby/red-arrow/lib/arrow/record-batch-iterator.rb
deleted file mode 100644
index 4b828c6d..0000000
--- a/ruby/red-arrow/lib/arrow/record-batch-iterator.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class RecordBatchIterator
-    alias_method :to_a, :to_list
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/record-batch-stream-reader.rb b/ruby/red-arrow/lib/arrow/record-batch-stream-reader.rb
deleted file mode 100644
index fa15c80..0000000
--- a/ruby/red-arrow/lib/arrow/record-batch-stream-reader.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class RecordBatchStreamReader
-    include Enumerable
-
-    def each
-      loop do
-        record_batch = next_record_batch
-        break if record_batch.nil?
-        yield(record_batch)
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/record-batch.rb b/ruby/red-arrow/lib/arrow/record-batch.rb
deleted file mode 100644
index e7ebf50..0000000
--- a/ruby/red-arrow/lib/arrow/record-batch.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-require "arrow/raw-table-converter"
-
-module Arrow
-  class RecordBatch
-    include ColumnContainable
-    include RecordContainable
-    include Enumerable
-
-    class << self
-      def new(*args)
-        n_args = args.size
-        case n_args
-        when 1
-          raw_table_converter = RawTableConverter.new(args[0])
-          n_rows = raw_table_converter.n_rows
-          schema = raw_table_converter.schema
-          values = raw_table_converter.values
-          super(schema, n_rows, values)
-        when 2
-          schema, data = args
-          RecordBatchBuilder.build(schema, data)
-        when 3
-          super
-        else
-          message = "wrong number of arguments (given #{n_args}, expected 1..3)"
-          raise ArgumentError, message
-        end
-      end
-    end
-
-    alias_method :each, :each_record
-
-    alias_method :size, :n_rows
-    alias_method :length, :n_rows
-
-    alias_method :[], :find_column
-
-    # Converts the record batch to {Arrow::Table}.
-    #
-    # @return [Arrow::Table]
-    #
-    # @since 0.12.0
-    def to_table
-      Table.new(schema, [self])
-    end
-
-    def respond_to_missing?(name, include_private)
-      return true if find_column(name)
-      super
-    end
-
-    def method_missing(name, *args, &block)
-      if args.empty?
-        column = find_column(name)
-        return column if column
-      end
-      super
-    end
-  end
-end
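A sketch of the constructor overloads handled above:

    # One argument: a raw table (Hash or Array of Arrow::Column).
    batch = Arrow::RecordBatch.new("count" => [1, 2, 3])
    # Two arguments: schema plus records, via RecordBatchBuilder.build.
    batch2 = Arrow::RecordBatch.new(batch.schema, [[4], [5]])
    batch.to_table  # => an Arrow::Table wrapping this single batch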
diff --git a/ruby/red-arrow/lib/arrow/record-containable.rb b/ruby/red-arrow/lib/arrow/record-containable.rb
deleted file mode 100644
index 20c9ac2..0000000
--- a/ruby/red-arrow/lib/arrow/record-containable.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  module RecordContainable
-    def each_record(reuse_record: false)
-      unless block_given?
-        return to_enum(__method__, reuse_record: reuse_record)
-      end
-
-      if reuse_record
-        record = Record.new(self, nil)
-        n_rows.times do |i|
-          record.index = i
-          yield(record)
-        end
-      else
-        n_rows.times do |i|
-          yield(Record.new(self, i))
-        end
-      end
-    end
-  end
-end
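A sketch of the reuse_record: true branch above, which yields one mutated
Record instead of allocating one per row:

    batch = Arrow::RecordBatch.new("count" => [1, 2, 3])
    batch.each_record(reuse_record: true) do |record|
      # The same Record is yielded with an updated index each time;
      # materialize with #to_h or #to_a before keeping data around.
      p record.to_h  # => {"count" => 1}, then {"count" => 2}, ...
    end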
diff --git a/ruby/red-arrow/lib/arrow/record.rb b/ruby/red-arrow/lib/arrow/record.rb
deleted file mode 100644
index 6f83dde..0000000
--- a/ruby/red-arrow/lib/arrow/record.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Record
-    attr_reader :container
-    attr_accessor :index
-    def initialize(container, index)
-      @container = container
-      @index = index
-    end
-
-    def [](column_name_or_column_index)
-      column = @container.find_column(column_name_or_column_index)
-      return nil if column.nil?
-      column[@index]
-    end
-
-    def to_a
-      @container.columns.collect do |column|
-        column[@index]
-      end
-    end
-
-    def to_h
-      attributes = {}
-      @container.columns.each do |column|
-        attributes[column.name] = column[@index]
-      end
-      attributes
-    end
-
-    def respond_to_missing?(name, include_private)
-      return true if @container.find_column(name)
-      super
-    end
-
-    def method_missing(name, *args, &block)
-      if args.empty?
-        column = @container.find_column(name)
-        return column[@index] if column
-      end
-      super
-    end
-  end
-end
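A sketch of the removed Record accessors:

    record = Arrow::RecordBatch.new("count" => [1, 2]).each_record.first
    record["count"]  # by column name
    record[0]        # by column index
    record.count     # dynamic reader via method_missing above
    record.to_h      # => {"count" => 1}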
diff --git a/ruby/red-arrow/lib/arrow/rolling-window.rb b/ruby/red-arrow/lib/arrow/rolling-window.rb
deleted file mode 100644
index 1db03bb..0000000
--- a/ruby/red-arrow/lib/arrow/rolling-window.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  # Experimental
-  #
-  # TODO: Almost all of this code should be implemented in Apache Arrow C++.
-  class RollingWindow
-    def initialize(table, size)
-      @table = table
-      @size = size
-    end
-
-    def lag(key, diff: 1)
-      column = @table[key]
-      if @size
-        windows = column.each_slice(@size)
-      else
-        windows = column
-      end
-      lag_values = [nil] * diff
-      windows.each_cons(diff + 1) do |values|
-        target = values[0]
-        current = values[1]
-        if target.nil? or current.nil?
-          lag_values << nil
-        else
-          lag_values << current - target
-        end
-      end
-      ArrayBuilder.build(lag_values)
-    end
-  end
-end
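A worked sketch of the removed #lag, constructing the window directly and
assuming the column is enumerable, as #lag requires. The difference is nil
wherever either side of a pair is null, and diff: 1 prepends one nil:

    table = Arrow::Table.new("price" => [10, 13, 12, nil, 20])
    window = Arrow::RollingWindow.new(table, nil)  # nil size: no slicing
    window.lag("price")
    # => an array built from [nil, 3, -1, nil, nil]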
diff --git a/ruby/red-arrow/lib/arrow/schema.rb b/ruby/red-arrow/lib/arrow/schema.rb
deleted file mode 100644
index 03354c8..0000000
--- a/ruby/red-arrow/lib/arrow/schema.rb
+++ /dev/null
@@ -1,100 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class Schema
-    include FieldContainable
-
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::Schema}.
-    #
-    # @overload initialize(fields)
-    #
-    #   @param fields [::Array<Arrow::Field, Hash>] The fields of the
-    #     schema. You can mix {Arrow::Field}s and field descriptions
-    #     in the fields.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @example Create a schema with {Arrow::Field}s
-    #     visible_field = Arrow::Field.new("visible", :boolean)
-    #     Arrow::Schema.new([visible_field])
-    #
-    #   @example Create a schema with field descriptions
-    #      visible_field_description = {
-    #        name: "visible",
-    #        data_type: :boolean,
-    #      }
-    #      Arrow::Schema.new([visible_field_description])
-    #
-    #   @example Create a schema with {Arrow::Field}s and field descriptions
-    #      fields = [
-    #        Arrow::Field.new("visible", :boolean),
-    #        {
-    #          name: "count",
-    #          type: :int32,
-    #        },
-    #      ]
-    #      Arrow::Schema.new(fields)
-    #
-    # @overload initialize(fields)
-    #
-    #   @param fields [Hash{String, Symbol => Arrow::DataType, Hash}]
-    #     The pairs of field name and field data type of the schema.
-    #     You can mix {Arrow::DataType}s and data type descriptions
-    #     for the field data types.
-    #
-    #     See {Arrow::DataType.new} for how to specify a data type description.
-    #
-    #   @example Create a schema with fields
-    #      fields = {
-    #        "visible" => Arrow::BooleanDataType.new,
-    #        :count => :int32,
-    #        :tags => {
-    #          type: :list,
-    #          field: {
-    #            name: "tag",
-    #            type: :string,
-    #          },
-    #        },
-    #      }
-    #      Arrow::Schema.new(fields)
-    def initialize(fields)
-      case fields
-      when ::Array
-        fields = fields.collect do |field|
-          field = Field.new(field) unless field.is_a?(Field)
-          field
-        end
-      when Hash
-        fields = fields.collect do |name, data_type|
-          Field.new(name, data_type)
-        end
-      end
-      initialize_raw(fields)
-    end
-
-    alias_method :[], :find_field
-
-    alias_method :to_s_raw, :to_s
-    def to_s(show_metadata: false)
-      to_string_metadata(show_metadata)
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/slicer.rb b/ruby/red-arrow/lib/arrow/slicer.rb
deleted file mode 100644
index fa83476..0000000
--- a/ruby/red-arrow/lib/arrow/slicer.rb
+++ /dev/null
@@ -1,454 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  # Experimental
-  #
-  # TODO: Almost all of this code should be implemented in Apache Arrow C++.
-  class Slicer
-    def initialize(table)
-      @table = table
-    end
-
-    def [](column_name)
-      column = @table[column_name]
-      return nil if column.nil?
-      ColumnCondition.new(column)
-    end
-
-    def respond_to_missing?(name, include_private)
-      return true if self[name]
-      super
-    end
-
-    def method_missing(name, *args, &block)
-      if args.empty?
-        column_condition = self[name]
-        return column_condition if column_condition
-      end
-      super
-    end
-
-    class Condition
-      def evaluate
-        message = "Slicer::Condition must define \#evaluate: #{inspect}"
-        raise NotImplementedError.new(message)
-      end
-
-      def &(condition)
-        AndCondition.new(self, condition)
-      end
-
-      def |(condition)
-        OrCondition.new(self, condition)
-      end
-
-      def ^(condition)
-        XorCondition.new(self, condition)
-      end
-    end
-
-    class LogicalCondition < Condition
-      def initialize(condition1, condition2)
-        @condition1 = condition1
-        @condition2 = condition2
-      end
-
-      def evaluate
-        values1 = @condition1.evaluate.each
-        values2 = @condition2.evaluate.each
-        raw_array = []
-        begin
-          loop do
-            value1 = values1.next
-            value2 = values2.next
-            if value1.nil? or value2.nil?
-              raw_array << nil
-            else
-              raw_array << evaluate_value(value1, value2)
-            end
-          end
-        rescue StopIteration
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class AndCondition < LogicalCondition
-      private
-      def evaluate_value(value1, value2)
-        value1 and value2
-      end
-    end
-
-    class OrCondition < LogicalCondition
-      private
-      def evaluate_value(value1, value2)
-        value1 or value2
-      end
-    end
-
-    class XorCondition < LogicalCondition
-      private
-      def evaluate_value(value1, value2)
-        value1 ^ value2
-      end
-    end
-
-    class ColumnCondition < Condition
-      def initialize(column)
-        @column = column
-      end
-
-      def evaluate
-        data = @column.data
-
-        case @column.data_type
-        when BooleanDataType
-          data
-        else
-          if data.n_chunks == 1
-            data.get_chunk(0).cast(BooleanDataType.new, nil)
-          else
-            arrays = data.each_chunk.collect do |chunk|
-              chunk.cast(BooleanDataType.new, nil)
-            end
-            ChunkedArray.new(arrays)
-          end
-        end
-      end
-
-      def !@
-        NotColumnCondition.new(@column)
-      end
-
-      def null?
-        self == nil
-      end
-
-      def valid?
-        self != nil
-      end
-
-      def ==(value)
-        EqualCondition.new(@column, value)
-      end
-
-      def !=(value)
-        NotEqualCondition.new(@column, value)
-      end
-
-      def <(value)
-        LessCondition.new(@column, value)
-      end
-
-      def <=(value)
-        LessEqualCondition.new(@column, value)
-      end
-
-      def >(value)
-        GreaterCondition.new(@column, value)
-      end
-
-      def >=(value)
-        GreaterEqualCondition.new(@column, value)
-      end
-
-      def in?(values)
-        InCondition.new(@column, values)
-      end
-
-      def select(&block)
-        SelectCondition.new(@column, block)
-      end
-
-      def reject(&block)
-        RejectCondition.new(@column, block)
-      end
-    end
-
-    class NotColumnCondition < Condition
-      def initialize(column)
-        @column = column
-      end
-
-      def evaluate
-        data = @column.data
-        raw_array = []
-        data.each_chunk do |chunk|
-          if chunk.is_a?(BooleanArray)
-            boolean_array = chunk
-          else
-            boolean_array = chunk.cast(BooleanDataType.new, nil)
-          end
-          boolean_array.each do |value|
-            if value.nil?
-              raw_array << value
-            else
-              raw_array << !value
-            end
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-
-      def !@
-        ColumnCondition.new(@column)
-      end
-    end
-
-    class EqualCondition < Condition
-      def initialize(column, value)
-        @column = column
-        @value = value
-      end
-
-      def !@
-        NotEqualCondition.new(@column, @value)
-      end
-
-      def evaluate
-        case @value
-        when nil
-          raw_array = @column.collect(&:nil?)
-          BooleanArray.new(raw_array)
-        else
-          raw_array = @column.collect do |value|
-            if value.nil?
-              nil
-            else
-              @value == value
-            end
-          end
-          BooleanArray.new(raw_array)
-        end
-      end
-    end
-
-    class NotEqualCondition < Condition
-      def initialize(column, value)
-        @column = column
-        @value = value
-      end
-
-      def !@
-        EqualCondition.new(@column, @value)
-      end
-
-      def evaluate
-        case @value
-        when nil
-          if @column.n_nulls.zero?
-            raw_array = [true] * @column.n_rows
-          else
-            raw_array = @column.n_rows.times.collect do |i|
-              @column.valid?(i)
-            end
-          end
-          BooleanArray.new(raw_array)
-        else
-          raw_array = @column.collect do |value|
-            if value.nil?
-              nil
-            else
-              @value != value
-            end
-          end
-          BooleanArray.new(raw_array)
-        end
-      end
-    end
-
-    class LessCondition < Condition
-      def initialize(column, value)
-        @column = column
-        @value = value
-      end
-
-      def !@
-        GreaterEqualCondition.new(@column, @value)
-      end
-
-      def evaluate
-        raw_array = @column.collect do |value|
-          if value.nil?
-            nil
-          else
-            @value > value
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class LessEqualCondition < Condition
-      def initialize(column, value)
-        @column = column
-        @value = value
-      end
-
-      def !@
-        GreaterCondition.new(@column, @value)
-      end
-
-      def evaluate
-        raw_array = @column.collect do |value|
-          if value.nil?
-            nil
-          else
-            @value >= value
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class GreaterCondition < Condition
-      def initialize(column, value)
-        @column = column
-        @value = value
-      end
-
-      def !@
-        LessEqualCondition.new(@column, @value)
-      end
-
-      def evaluate
-        raw_array = @column.collect do |value|
-          if value.nil?
-            nil
-          else
-            @value < value
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class GreaterEqualCondition < Condition
-      def initialize(column, value)
-        @column = column
-        @value = value
-      end
-
-      def !@
-        LessCondition.new(@column, @value)
-      end
-
-      def evaluate
-        raw_array = @column.collect do |value|
-          if value.nil?
-            nil
-          else
-            @value <= value
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class InCondition < Condition
-      def initialize(column, values)
-        @column = column
-        @values = values
-      end
-
-      def !@
-        NotInCondition.new(@column, @values)
-      end
-
-      def evaluate
-        values_index = {}
-        @values.each do |value|
-          values_index[value] = true
-        end
-        raw_array = @column.collect do |value|
-          if value.nil?
-            nil
-          else
-            values_index.key?(value)
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class NotInCondition < Condition
-      def initialize(column, values)
-        @column = column
-        @values = values
-      end
-
-      def !@
-        InCondition.new(@column, @values)
-      end
-
-      def evaluate
-        values_index = {}
-        @values.each do |value|
-          values_index[value] = true
-        end
-        raw_array = @column.collect do |value|
-          if value.nil?
-            nil
-          else
-            not values_index.key?(value)
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-
-    class SelectCondition < Condition
-      def initialize(column, block)
-        @column = column
-        @block = block
-      end
-
-      def !@
-        RejectCondition.new(@column, @block)
-      end
-
-      def evaluate
-        BooleanArray.new(@column.collect(&@block))
-      end
-    end
-
-    class RejectCondition < Condition
-      def initialize(column, block)
-        @column = column
-        @block = block
-      end
-
-      def !@
-        SelectCondition.new(@column, @block)
-      end
-
-      def evaluate
-        raw_array = @column.collect do |value|
-          evaluated_value = @block.call(value)
-          if evaluated_value.nil?
-            nil
-          else
-            not evaluated_value
-          end
-        end
-        BooleanArray.new(raw_array)
-      end
-    end
-  end
-end
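The removed Slicer is normally reached through Table#slice with a block,
which yields a Slicer; that entry point lives in table.rb and is assumed
here. Note that conditions compose with &, |, and ^ rather than &&/||,
since Ruby does not allow overriding the short-circuit operators:

    table = Arrow::Table.new("count"   => [1, nil, 3],
                             "visible" => [true, false, nil])
    table.slice do |slicer|
      (slicer.count > 1) & slicer.visible.valid?
    end
    # Keeps rows where count > 1 and visible is not null.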
diff --git a/ruby/red-arrow/lib/arrow/sort-key.rb b/ruby/red-arrow/lib/arrow/sort-key.rb
deleted file mode 100644
index 9870272..0000000
--- a/ruby/red-arrow/lib/arrow/sort-key.rb
+++ /dev/null
@@ -1,193 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class SortKey
-    class << self
-      # Ensures that a suitable {Arrow::SortKey} is returned.
-      #
-      # @overload resolve(sort_key)
-      #
-      #   Returns the given sort key itself. This is convenient when
-      #   using this method as an {Arrow::SortKey} converter.
-      #
-      #   @param sort_key [Arrow::SortKey] The sort key.
-      #
-      #   @return [Arrow::SortKey] The given sort key itself.
-      #
-      # @overload resolve(name)
-      #
-      #   Creates a new suitable sort key from a column name with a
-      #   leading order mark. See {#initialize} for details about the
-      #   order mark.
-      #
-      #   @return [Arrow::SortKey] A new suitable sort key.
-      #
-      # @overload resolve(name, order)
-      #
-      #   Creates a new suitable sort key from a column name without a
-      #   leading order mark, plus an order. See {#initialize} for details.
-      #
-      #   @return [Arrow::SortKey] A new suitable sort key.
-      #
-      # @since 4.0.0
-      def resolve(name, order=nil)
-        return name if name.is_a?(self)
-        new(name, order)
-      end
-
-      # @api private
-      def try_convert(value)
-        case value
-        when Symbol, String
-          new(value.to_s, :ascending)
-        else
-          nil
-        end
-      end
-    end
-
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-    # Creates a new {Arrow::SortKey}.
-    #
-    # @overload initialize(name)
-    #
-    #   @param name [Symbol, String] The name of the sort column.
-    #
-    #     If `name` is a String, the first character may be processed
-    #     as the "leading order mark". If the first character is `"+"`
-    #     or `"-"`, they are processed as a leading order mark. If the
-    #     first character is processed as a leading order mark, the
-    #     first character is removed from sort column name and
-    #     corresponding order is used. `"+"` uses ascending order and
-    #     `"-"` uses ascending order.
-    #
-    #     If `name` is not a String, or it doesn't start with a
-    #     leading order mark, the sort column name is `name` as-is
-    #     and ascending order is used.
-    #
-    #   @example String without the leading order mark
-    #     key = Arrow::SortKey.new("count")
-    #     key.name  # => "count"
-    #     key.order # => Arrow::SortOrder::ASCENDING
-    #
-    #   @example String with the "+" leading order mark
-    #     key = Arrow::SortKey.new("+count")
-    #     key.name  # => "count"
-    #     key.order # => Arrow::SortOrder::ASCENDING
-    #
-    #   @example String with the "-" leading order mark
-    #     key = Arrow::SortKey.new("-count")
-    #     key.name  # => "count"
-    #     key.order # => Arrow::SortOrder::DESCENDING
-    #
-    #   @example Symbol that starts with "-"
-    #     key = Arrow::SortKey.new(:"-count")
-    #     key.name  # => "-count"
-    #     key.order # => Arrow::SortOrder::ASCENDING
-    #
-    # @overload initialize(name, order)
-    #
-    #   @param name [Symbol, String] The name of the sort column.
-    #
-    #     No leading order mark processing. The given `name` is used
-    #     as-is.
-    #
-    #   @param order [Symbol, String, Arrow::SortOrder] How to order
-    #     by this sort key.
-    #
-    #     If this is a Symbol or String, this must be `:ascending`,
-    #     `"ascending"`, `:asc`, `"asc"`, `:descending`,
-    #     `"descending"`, `:desc` or `"desc"`.
-    #
-    #   @example No leading order mark processing
-    #     key = Arrow::SortKey.new("-count", :ascending)
-    #     key.name  # => "-count"
-    #     key.order # => Arrow::SortOrder::ASCENDING
-    #
-    #   @example Order by abbreviated name with Symbol
-    #     key = Arrow::SortKey.new("count", :desc)
-    #     key.name  # => "count"
-    #     key.order # => Arrow::SortOrder::DESCENDING
-    #
-    #   @example Order by String
-    #     key = Arrow::SortKey.new("count", "descending")
-    #     key.name  # => "count"
-    #     key.order # => Arrow::SortOrder::DESCENDING
-    #
-    #   @example Order by Arrow::SortOrder
-    #     key = Arrow::SortKey.new("count", Arrow::SortOrder::DESCENDING)
-    #     key.name  # => "count"
-    #     key.order # => Arrow::SortOrder::DESCENDING
-    #
-    # @since 4.0.0
-    def initialize(name, order=nil)
-      name, order = normalize_name(name, order)
-      order = normalize_order(order) || :ascending
-      initialize_raw(name, order)
-    end
-
-    # @return [String] The string representation of this sort key. You
-    #   can recreate the {Arrow::SortKey} with
-    #   `Arrow::SortKey.new(key.to_s)`.
-    #
-    # @example Recreate Arrow::SortKey
-    #   key = Arrow::SortKey.new("-count")
-    #   key.to_s # => "-count"
-    #   key == Arrow::SortKey.new(key.to_s) # => true
-    #
-    # @since 4.0.0
-    def to_s
-      if order == SortOrder::ASCENDING
-        "+#{name}"
-      else
-        "-#{name}"
-      end
-    end
-
-    private
-    def normalize_name(name, order)
-      case name
-      when Symbol
-        return name.to_s, order
-      when String
-        return name, order if order
-        if name.start_with?("-")
-          return name[1..-1], order || :descending
-        elsif name.start_with?("+")
-          return name[1..-1], order || :ascending
-        else
-          return name, order
-        end
-      else
-        return name, order
-      end
-    end
-
-    def normalize_order(order)
-      case order
-      when :asc, "asc"
-        :ascending
-      when :desc, "desc"
-        :descending
-      else
-        order
-      end
-    end
-  end
-end
diff --git a/ruby/red-arrow/lib/arrow/sort-options.rb b/ruby/red-arrow/lib/arrow/sort-options.rb
deleted file mode 100644
index a7c2d64..0000000
--- a/ruby/red-arrow/lib/arrow/sort-options.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class SortOptions
-    class << self
-      # @api private
-      def try_convert(value)
-        case value
-        when Symbol, String
-          new(value)
-        when ::Array
-          new(*value)
-        else
-          nil
-        end
-      end
-    end
-
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-    # @param sort_keys [::Array<String, Symbol, Arrow::SortKey>] The
-    #   sort keys to be used. See {Arrow::SortKey.resolve} for how
-    #   each sort key in `sort_keys` is resolved.
-    #
-    #   You can add more sort keys by {#add_sort_key} later.
-    #
-    # @example No initial sort keys
-    #   options = Arrow::SortOptions.new
-    #   options.sort_keys # => []
-    #
-    # @example String sort keys
-    #   options = Arrow::SortOptions.new("count", "-age")
-    #   options.sort_keys.collect(&:to_s) # => ["+count", "-age"]
-    #
-    # @example Symbol sort keys
-    #   options = Arrow::SortOptions.new(:count, :age)
-    #   options.sort_keys.collect(&:to_s) # => ["+count", "+age"]
-    #
-    # @example Mixed sort keys
-    #   options = Arrow::SortOptions.new(:count, "-age")
-    #   options.sort_keys.collect(&:to_s) # => ["+count", "-age"]
-    #
-    # @since 4.0.0
-    def initialize(*sort_keys)
-      initialize_raw
-      sort_keys.each do |sort_key|
-        add_sort_key(sort_key)
-      end
-    end
-
-    # @api private
-    alias_method :add_sort_key_raw, :add_sort_key
-    # Add a sort key.
-    #
-    # @return [void]
-    #
-    # @overload add_sort_key(key)
-    #
-    #   @param key [Arrow::SortKey] The sort key to be added.
-    #
-    #   @example Add a key to sort by "price" column in descending order
-    #     options = Arrow::SortOptions.new
-    #     options.add_sort_key(Arrow::SortKey.new(:price, :descending))
-    #     options.sort_keys.collect(&:to_s) # => ["-price"]
-    #
-    # @overload add_sort_key(name)
-    #
-    #   @param name [Symbol, String] The sort key name to be
-    #     added. See also {Arrow::SortKey#initialize} for the leading
-    #     order mark in a String name.
-    #
-    #   @example Add a key to sort by "price" column in descending order
-    #     options = Arrow::SortOptions.new
-    #     options.add_sort_key("-price")
-    #     options.sort_keys.collect(&:to_s) # => ["-price"]
-    #
-    # @overload add_sort_key(name, order)
-    #
-    #   @param name [Symbol, String] The sort key name.
-    #
-    #   @param order [Symbol, String, Arrow::SortOrder] The sort
-    #     order. See {Arrow::SortKey#initialize} for details.
-    #
-    #   @example Add a key to sort by "price" column in descending order
-    #     options = Arrow::SortOptions.new
-    #     options.add_sort_key("price", :desc)
-    #     options.sort_keys.collect(&:to_s) # => ["-price"]
-    #
-    # @since 4.0.0
-    def add_sort_key(name, order=nil)
-      add_sort_key_raw(SortKey.resolve(name, order))
-    end
-  end
-end
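
The `try_convert` hook in the removed sort-options.rb coerces a Symbol or
String into options with a single sort key, splats an ::Array into one key
per element, and returns nil for anything else. The same rule restated as a
small Python sketch; all names here are illustrative, not part of any
Arrow API:

```python
# Sketch of the coercion rule from the removed SortOptions.try_convert.

def _parse(key):
    # "-age" -> ("age", "descending"); "count" -> ("count", "ascending")
    order = {"-": "descending", "+": "ascending"}.get(key[:1])
    return (key[1:], order) if order else (key, "ascending")

def try_convert_sort_options(value):
    if isinstance(value, str):
        return [_parse(value)]               # one sort key
    if isinstance(value, (list, tuple)):
        return [_parse(v) for v in value]    # one key per element
    return None                              # not convertible

assert try_convert_sort_options("-age") == [("age", "descending")]
assert try_convert_sort_options(["count", "-age"]) == [
    ("count", "ascending"), ("age", "descending")]
assert try_convert_sort_options(42) is None
```
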
diff --git a/ruby/red-arrow/lib/arrow/sparse-union-data-type.rb b/ruby/red-arrow/lib/arrow/sparse-union-data-type.rb
deleted file mode 100644
index 14f3e5a..0000000
--- a/ruby/red-arrow/lib/arrow/sparse-union-data-type.rb
+++ /dev/null
@@ -1,90 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class SparseUnionDataType
-    alias_method :initialize_raw, :initialize
-    private :initialize_raw
-
-    # Creates a new {Arrow::SparseUnionDataType}.
-    #
-    # @overload initialize(fields, type_codes)
-    #
-    #   @param fields [::Array<Arrow::Field, Hash>] The fields of the
-    #     sparse union data type. You can mix {Arrow::Field} objects and
-    #     field descriptions in the fields.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @param type_codes [::Array<Integer>] The IDs that indicate the
-    #     corresponding fields.
-    #
-    #   @example Create a sparse union data type for `{2: visible, 9: count}`
-    #     fields = [
-    #       Arrow::Field.new("visible", :boolean),
-    #       {
-    #         name: "count",
-    #         type: :int32,
-    #       },
-    #     ]
-    #     Arrow::SparseUnionDataType.new(fields, [2, 9])
-    #
-    # @overload initialize(description)
-    #
-    #   @param description [Hash] The description of the sparse union
-    #     data type. It must have `:fields` and `:type_codes` values.
-    #
-    #   @option description [::Array<Arrow::Field, Hash>] :fields The
-    #     fields of the sparse union data type. You can mix
-    #     {Arrow::Field} objects and field descriptions in the fields.
-    #
-    #     See {Arrow::Field.new} for how to specify a field description.
-    #
-    #   @option description [::Array<Integer>] :type_codes The IDs
-    #     that indicate the corresponding fields.
-    #
-    #   @example Create a sparse union data type for `{2: visible, 9: count}`
-    #     fields = [
-    #       Arrow::Field.new("visible", :boolean),
-    #       {
-    #         name: "count",
-    #         type: :int32,
-    #       },
-    #     ]
-    #     Arrow::SparseUnionDataType.new(fields: fields,
-    #                                    type_codes: [2, 9])
-    def initialize(*args)
-      n_args = args.size
-      case n_args
-      when 1
-        description = args[0]
-        fields = description[:fields]
-        type_codes = description[:type_codes]
-      when 2
-        fields, type_codes = args
-      else
-        message = "wrong number of arguments (given #{n_args}, expected 1..2)"
-        raise ArgumentError, message
-      end
-      fields = fields.collect do |field|
-        field = Field.new(field) unless field.is_a?(Field)
-        field
-      end
-      initialize_raw(fields, type_codes)
-    end
-  end
-end
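
The constructor removed above dispatches on arity: one argument is a
description Hash carrying :fields and :type_codes, two arguments are the
fields and type codes passed positionally. A Python sketch of that dispatch
under the same 1..2 argument contract; the function name is hypothetical:

```python
# Sketch of the one-hash / two-positional dispatch used by the removed
# SparseUnionDataType#initialize. Names are illustrative only.

def normalize_union_args(*args):
    if len(args) == 1:                 # description-hash form
        description = args[0]
        fields = description["fields"]
        type_codes = description["type_codes"]
    elif len(args) == 2:               # positional form
        fields, type_codes = args
    else:
        raise TypeError(
            f"wrong number of arguments (given {len(args)}, expected 1..2)")
    return list(fields), list(type_codes)

# Both call styles normalize to the same pair:
assert normalize_union_args({"fields": ["visible", "count"],
                             "type_codes": [2, 9]}) \
    == normalize_union_args(["visible", "count"], [2, 9])
```
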
diff --git a/ruby/red-arrow/lib/arrow/struct-array-builder.rb b/ruby/red-arrow/lib/arrow/struct-array-builder.rb
deleted file mode 100644
index ce88316..0000000
--- a/ruby/red-arrow/lib/arrow/struct-array-builder.rb
+++ /dev/null
@@ -1,146 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-module Arrow
-  class StructArrayBuilder
-    class << self
-      def build(data_type, values)
-        builder = new(data_type)
-        builder.build(values)
-      end
-    end
-
-    def [](index_or_name)
-      find_field_builder(index_or_name)
-    end
-
-    def find_field_builder(index_or_name)
-      case index_or_name
-      when String, Symbol
-        name = index_or_name
-        cached_name_to_builder[name.to_s]
-      else
-        index = index_or_name
-        cached_field_builders[index]
-      end
-    end
-
-    alias_method :append_value_raw, :append_value
-
-    # @overload append_value
-    #
-    #   Starts appending a struct record. You then need to append
-    #   values for its fields.
-    #
-    # @overload append_value(value)
-    #
-    #   Appends a struct record including values of fields.
-    #
-    #   @param value [nil, ::Array, Hash] The struct record value.
-    #
-    #     If this is `nil`, the struct record is null.
-    #
-    #     If this is an `Array` or a `Hash`, it holds the values of the fields.
-    #
-    # @since 0.12.0
-    def append_value(*args)
-      n_args = args.size
-
-      case n_args
-      when 0
-        append_value_raw
-      when 1
-        value = args[0]
-        case value
-        when nil
... 17016 lines suppressed ...
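
For reference, the `append_value(value)` overload above accepts nil (a null
struct record), an Array (field values in field order), or a Hash (field
values keyed by name). A compact Python sketch of that value classification;
the function name is hypothetical:

```python
# Sketch of the value handling documented for the removed
# StructArrayBuilder#append_value(value). Illustrative names only.

def classify_struct_value(value):
    if value is None:
        return "null-record"
    if isinstance(value, (list, tuple)):
        return "positional-field-values"   # values in field order
    if isinstance(value, dict):
        return "named-field-values"        # values keyed by field name
    raise TypeError(f"expected nil, Array, or Hash, not {value!r}")

assert classify_struct_value(None) == "null-record"
assert classify_struct_value([True, 1]) == "positional-field-values"
assert classify_struct_value({"visible": True, "count": 1}) == "named-field-values"
```
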

[arrow-rs] 03/14: Removed Python.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit 4d14b301cda523d363e17ee5d03a581675915a32
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:19:46 2021 +0000

    Removed Python.
---
 python/.coveragerc                                 |   19 -
 python/.flake8.cython                              |   20 -
 python/.gitignore                                  |   45 -
 python/CMakeLists.txt                              |  619 ---
 python/MANIFEST.in                                 |   15 -
 python/README.md                                   |   59 -
 python/asv-build.sh                                |   75 -
 python/asv-install.sh                              |   21 -
 python/asv-uninstall.sh                            |   21 -
 python/asv.conf.json                               |  187 -
 python/benchmarks/__init__.py                      |   16 -
 python/benchmarks/array_ops.py                     |   34 -
 python/benchmarks/common.py                        |  349 --
 python/benchmarks/convert_builtins.py              |   87 -
 python/benchmarks/convert_pandas.py                |  121 -
 python/benchmarks/io.py                            |   89 -
 python/benchmarks/microbenchmarks.py               |   45 -
 python/benchmarks/parquet.py                       |  156 -
 python/benchmarks/plasma.py                        |   72 -
 python/benchmarks/streaming.py                     |   70 -
 python/cmake_modules                               |    1 -
 python/examples/flight/client.py                   |  189 -
 python/examples/flight/middleware.py               |  167 -
 python/examples/flight/server.py                   |  154 -
 python/examples/minimal_build/Dockerfile.fedora    |   31 -
 python/examples/minimal_build/Dockerfile.ubuntu    |   38 -
 python/examples/minimal_build/README.md            |   73 -
 python/examples/minimal_build/build_conda.sh       |  119 -
 python/examples/minimal_build/build_venv.sh        |   84 -
 python/examples/plasma/sorting/multimerge.pyx      |  102 -
 python/examples/plasma/sorting/setup.py            |   27 -
 python/examples/plasma/sorting/sort_df.py          |  203 -
 python/pyarrow/__init__.pxd                        |   42 -
 python/pyarrow/__init__.py                         |  504 ---
 python/pyarrow/_compute.pxd                        |   27 -
 python/pyarrow/_compute.pyx                        | 1092 -----
 python/pyarrow/_csv.pxd                            |   46 -
 python/pyarrow/_csv.pyx                            |  952 -----
 python/pyarrow/_cuda.pxd                           |   67 -
 python/pyarrow/_cuda.pyx                           | 1059 -----
 python/pyarrow/_dataset.pyx                        | 2977 -------------
 python/pyarrow/_flight.pyx                         | 2578 ------------
 python/pyarrow/_fs.pxd                             |   94 -
 python/pyarrow/_fs.pyx                             | 1088 -----
 python/pyarrow/_hdfs.pyx                           |  141 -
 python/pyarrow/_json.pyx                           |  249 --
 python/pyarrow/_orc.pxd                            |   53 -
 python/pyarrow/_orc.pyx                            |  111 -
 python/pyarrow/_parquet.pxd                        |  553 ---
 python/pyarrow/_parquet.pyx                        | 1435 -------
 python/pyarrow/_plasma.pyx                         |  868 ----
 python/pyarrow/_s3fs.pyx                           |  260 --
 python/pyarrow/array.pxi                           | 2387 -----------
 python/pyarrow/benchmark.pxi                       |   20 -
 python/pyarrow/benchmark.py                        |   21 -
 python/pyarrow/builder.pxi                         |   82 -
 python/pyarrow/cffi.py                             |   71 -
 python/pyarrow/compat.pxi                          |   65 -
 python/pyarrow/compat.py                           |   29 -
 python/pyarrow/compute.py                          |  493 ---
 python/pyarrow/config.pxi                          |   74 -
 python/pyarrow/csv.py                              |   22 -
 python/pyarrow/cuda.py                             |   25 -
 python/pyarrow/dataset.py                          |  779 ----
 python/pyarrow/error.pxi                           |  231 --
 python/pyarrow/feather.pxi                         |  105 -
 python/pyarrow/feather.py                          |  262 --
 python/pyarrow/filesystem.py                       |  511 ---
 python/pyarrow/flight.py                           |   63 -
 python/pyarrow/fs.py                               |  326 --
 python/pyarrow/gandiva.pyx                         |  482 ---
 python/pyarrow/hdfs.py                             |  240 --
 python/pyarrow/includes/__init__.pxd               |    0
 python/pyarrow/includes/common.pxd                 |  137 -
 python/pyarrow/includes/libarrow.pxd               | 2356 -----------
 python/pyarrow/includes/libarrow_cuda.pxd          |  107 -
 python/pyarrow/includes/libarrow_dataset.pxd       |  384 --
 python/pyarrow/includes/libarrow_flight.pxd        |  552 ---
 python/pyarrow/includes/libarrow_fs.pxd            |  268 --
 python/pyarrow/includes/libgandiva.pxd             |  281 --
 python/pyarrow/includes/libplasma.pxd              |   25 -
 python/pyarrow/io-hdfs.pxi                         |  470 ---
 python/pyarrow/io.pxi                              | 1896 ---------
 python/pyarrow/ipc.pxi                             |  968 -----
 python/pyarrow/ipc.py                              |  233 --
 python/pyarrow/json.py                             |   19 -
 python/pyarrow/jvm.py                              |  335 --
 python/pyarrow/lib.pxd                             |  597 ---
 python/pyarrow/lib.pyx                             |  158 -
 python/pyarrow/memory.pxi                          |  216 -
 python/pyarrow/orc.py                              |  149 -
 python/pyarrow/pandas-shim.pxi                     |  254 --
 python/pyarrow/pandas_compat.py                    | 1226 ------
 python/pyarrow/parquet.py                          | 2076 ---------
 python/pyarrow/plasma.py                           |  152 -
 python/pyarrow/public-api.pxi                      |  418 --
 python/pyarrow/scalar.pxi                          |  927 -----
 python/pyarrow/serialization.pxi                   |  556 ---
 python/pyarrow/serialization.py                    |  504 ---
 python/pyarrow/table.pxi                           | 2266 ----------
 python/pyarrow/tensor.pxi                          |  892 ----
 python/pyarrow/tensorflow/plasma_op.cc             |  391 --
 python/pyarrow/tests/__init__.py                   |    0
 python/pyarrow/tests/arrow_7980.py                 |   30 -
 python/pyarrow/tests/conftest.py                   |  277 --
 .../v0.17.0.version=2-compression=lz4.feather      |  Bin 594 -> 0 bytes
 python/pyarrow/tests/data/orc/README.md            |   22 -
 .../tests/data/orc/TestOrcFile.emptyFile.jsn.gz    |  Bin 50 -> 0 bytes
 .../tests/data/orc/TestOrcFile.emptyFile.orc       |  Bin 523 -> 0 bytes
 .../tests/data/orc/TestOrcFile.test1.jsn.gz        |  Bin 323 -> 0 bytes
 .../pyarrow/tests/data/orc/TestOrcFile.test1.orc   |  Bin 1711 -> 0 bytes
 .../tests/data/orc/TestOrcFile.testDate1900.jsn.gz |  Bin 182453 -> 0 bytes
 .../tests/data/orc/TestOrcFile.testDate1900.orc    |  Bin 30941 -> 0 bytes
 python/pyarrow/tests/data/orc/decimal.jsn.gz       |  Bin 19313 -> 0 bytes
 python/pyarrow/tests/data/orc/decimal.orc          |  Bin 16337 -> 0 bytes
 .../data/parquet/v0.7.1.all-named-index.parquet    |  Bin 3948 -> 0 bytes
 .../v0.7.1.column-metadata-handling.parquet        |  Bin 2012 -> 0 bytes
 python/pyarrow/tests/data/parquet/v0.7.1.parquet   |  Bin 4372 -> 0 bytes
 .../data/parquet/v0.7.1.some-named-index.parquet   |  Bin 4008 -> 0 bytes
 python/pyarrow/tests/deserialize_buffer.py         |   26 -
 python/pyarrow/tests/pandas_examples.py            |  172 -
 python/pyarrow/tests/pandas_threaded_import.py     |   44 -
 python/pyarrow/tests/parquet/common.py             |  177 -
 python/pyarrow/tests/parquet/conftest.py           |   87 -
 python/pyarrow/tests/parquet/test_basic.py         |  586 ---
 .../tests/parquet/test_compliant_nested_type.py    |  113 -
 python/pyarrow/tests/parquet/test_data_types.py    |  524 ---
 python/pyarrow/tests/parquet/test_dataset.py       | 1588 -------
 python/pyarrow/tests/parquet/test_datetime.py      |  373 --
 python/pyarrow/tests/parquet/test_metadata.py      |  477 ---
 python/pyarrow/tests/parquet/test_pandas.py        |  687 ---
 python/pyarrow/tests/parquet/test_parquet_file.py  |  258 --
 .../pyarrow/tests/parquet/test_parquet_writer.py   |  275 --
 python/pyarrow/tests/pyarrow_cython_example.pyx    |   55 -
 python/pyarrow/tests/strategies.py                 |  414 --
 python/pyarrow/tests/test_adhoc_memory_leak.py     |   43 -
 python/pyarrow/tests/test_array.py                 | 2680 ------------
 python/pyarrow/tests/test_builder.py               |   67 -
 python/pyarrow/tests/test_cffi.py                  |  295 --
 python/pyarrow/tests/test_compute.py               | 1243 ------
 python/pyarrow/tests/test_convert_builtin.py       | 2156 ----------
 python/pyarrow/tests/test_csv.py                   | 1345 ------
 python/pyarrow/tests/test_cuda.py                  |  792 ----
 python/pyarrow/tests/test_cuda_numba_interop.py    |  235 --
 python/pyarrow/tests/test_cython.py                |  143 -
 python/pyarrow/tests/test_dataset.py               | 3158 --------------
 python/pyarrow/tests/test_deprecations.py          |   23 -
 python/pyarrow/tests/test_extension_type.py        |  668 ---
 python/pyarrow/tests/test_feather.py               |  792 ----
 python/pyarrow/tests/test_filesystem.py            |   67 -
 python/pyarrow/tests/test_flight.py                | 1808 --------
 python/pyarrow/tests/test_fs.py                    | 1521 -------
 python/pyarrow/tests/test_gandiva.py               |  365 --
 python/pyarrow/tests/test_hdfs.py                  |  442 --
 python/pyarrow/tests/test_io.py                    | 1754 --------
 python/pyarrow/tests/test_ipc.py                   |  962 -----
 python/pyarrow/tests/test_json.py                  |  310 --
 python/pyarrow/tests/test_jvm.py                   |  433 --
 python/pyarrow/tests/test_memory.py                |  156 -
 python/pyarrow/tests/test_misc.py                  |  175 -
 python/pyarrow/tests/test_orc.py                   |  165 -
 python/pyarrow/tests/test_pandas.py                | 4383 --------------------
 python/pyarrow/tests/test_plasma.py                | 1073 -----
 python/pyarrow/tests/test_plasma_tf_op.py          |  104 -
 python/pyarrow/tests/test_scalars.py               |  625 ---
 python/pyarrow/tests/test_schema.py                |  721 ----
 python/pyarrow/tests/test_serialization.py         | 1233 ------
 .../pyarrow/tests/test_serialization_deprecated.py |   56 -
 python/pyarrow/tests/test_sparse_tensor.py         |  491 ---
 python/pyarrow/tests/test_strategies.py            |   61 -
 python/pyarrow/tests/test_table.py                 | 1687 --------
 python/pyarrow/tests/test_tensor.py                |  215 -
 python/pyarrow/tests/test_types.py                 | 1041 -----
 python/pyarrow/tests/util.py                       |  231 --
 python/pyarrow/types.pxi                           | 2781 -------------
 python/pyarrow/types.py                            |  357 --
 python/pyarrow/util.py                             |  152 -
 python/pyarrow/vendored/__init__.py                |   16 -
 python/pyarrow/vendored/version.py                 |  545 ---
 python/pyproject.toml                              |   26 -
 python/requirements-build.txt                      |    4 -
 python/requirements-test.txt                       |    7 -
 python/requirements-wheel-build.txt                |    6 -
 python/requirements-wheel-test.txt                 |   11 -
 python/scripts/test_imports.py                     |   21 -
 python/scripts/test_leak.py                        |  110 -
 python/setup.cfg                                   |   34 -
 python/setup.py                                    |  628 ---
 188 files changed, 87207 deletions(-)

diff --git a/python/.coveragerc b/python/.coveragerc
deleted file mode 100644
index f5dc6e3..0000000
--- a/python/.coveragerc
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[run]
-plugins = Cython.Coverage
diff --git a/python/.flake8.cython b/python/.flake8.cython
deleted file mode 100644
index 4bc1958..0000000
--- a/python/.flake8.cython
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[flake8]
-filename = *.pyx,*.pxd,*.pxi
-ignore = E211,E901,E999,E225,E226,E227,W504
diff --git a/python/.gitignore b/python/.gitignore
deleted file mode 100644
index ef1237a..0000000
--- a/python/.gitignore
+++ /dev/null
@@ -1,45 +0,0 @@
-thirdparty/
-CMakeFiles/
-CMakeCache.txt
-CTestTestfile.cmake
-Makefile
-cmake_install.cmake
-build/
-Testing/
-
-# Python stuff
-
-# Editor temporary/working/backup files
-*flymake*
-
-# Generated sources
-*.c
-*.cpp
-pyarrow/*_api.h
-pyarrow/_generated_version.py
-
-# Bundled headers
-pyarrow/include
-
-# setup.py working directory
-build
-# setup.py dist directory
-dist
-# Coverage
-.coverage
-coverage.xml
-htmlcov
-# Cache
-.cache
-
-# benchmark working dir
-.asv
-pyarrow/_table_api.h
-
-# manylinux temporary files
-manylinux1/arrow
-nm_arrow.log
-visible_symbols.log
-
-# plasma store
-pyarrow/plasma-store-server
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
deleted file mode 100644
index 0714aa4..0000000
--- a/python/CMakeLists.txt
+++ /dev/null
@@ -1,619 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# Includes code assembled from BSD/MIT/Apache-licensed code from some 3rd-party
-# projects, including Kudu, Impala, and libdynd. See python/LICENSE.txt
-
-cmake_minimum_required(VERSION 3.2)
-project(pyarrow)
-
-# Running from a Python sdist tarball
-set(LOCAL_CMAKE_MODULES "${CMAKE_SOURCE_DIR}/cmake_modules")
-if(EXISTS "${LOCAL_CMAKE_MODULES}")
-  set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${LOCAL_CMAKE_MODULES})
-endif()
-
-# Running from a git source tree
-set(CPP_CMAKE_MODULES "${CMAKE_SOURCE_DIR}/../cpp/cmake_modules")
-if(EXISTS "${CPP_CMAKE_MODULES}")
-  set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CPP_CMAKE_MODULES})
-endif()
-
-include(CMakeParseArguments)
-
-# Only interpret if() arguments as variables or keywords when unquoted.
-# https://www.cmake.org/cmake/help/latest/policy/CMP0054.html
-cmake_policy(SET CMP0054 NEW)
-
-# Use the first Python installation on PATH, not the newest one
-set(Python3_FIND_STRATEGY "LOCATION")
-# On Windows, use registry last, not first
-set(Python3_FIND_REGISTRY "LAST")
-# On macOS, use framework last, not first
-set(Python3_FIND_FRAMEWORK "LAST")
-
-# Allow "make install" to not depend on all targets.
-#
-# Must be declared in the top-level CMakeLists.txt.
-set(CMAKE_SKIP_INSTALL_ALL_DEPENDENCY true)
-
-set(CMAKE_MACOSX_RPATH 1)
-if(DEFINED ENV{MACOSX_DEPLOYMENT_TARGET})
-  set(CMAKE_OSX_DEPLOYMENT_TARGET $ENV{MACOSX_DEPLOYMENT_TARGET})
-else()
-  set(CMAKE_OSX_DEPLOYMENT_TARGET 10.9)
-endif()
-
-# Generate a Clang compile_commands.json "compilation database" file for use
-# with various development tools, such as Vim's YouCompleteMe plugin.
-# See http://clang.llvm.org/docs/JSONCompilationDatabase.html
-if("$ENV{CMAKE_EXPORT_COMPILE_COMMANDS}" STREQUAL "1")
-  set(CMAKE_EXPORT_COMPILE_COMMANDS 1)
-endif()
-
-# Top level cmake dir
-if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}")
-  option(PYARROW_BUILD_CUDA "Build the PyArrow CUDA support" OFF)
-  option(PYARROW_BUILD_FLIGHT "Build the PyArrow Flight integration" OFF)
-  option(PYARROW_BUILD_DATASET "Build the PyArrow Dataset integration" OFF)
-  option(PYARROW_BUILD_GANDIVA "Build the PyArrow Gandiva integration" OFF)
-  option(PYARROW_BUILD_PARQUET "Build the PyArrow Parquet integration" OFF)
-  option(PYARROW_PARQUET_USE_SHARED "Rely on parquet shared libraries where relevant" ON)
-  option(PYARROW_BOOST_USE_SHARED
-         "Rely on boost shared libraries on linking static parquet" ON)
-  option(PYARROW_BUILD_PLASMA "Build the PyArrow Plasma integration" OFF)
-  option(PYARROW_USE_TENSORFLOW "Build PyArrow with TensorFlow support" OFF)
-  option(PYARROW_BUILD_ORC "Build the PyArrow ORC integration" OFF)
-  option(PYARROW_BUNDLE_ARROW_CPP "Bundle the Arrow C++ libraries" OFF)
-  option(PYARROW_BUNDLE_BOOST "Bundle the Boost libraries when we bundle Arrow C++" OFF)
-  option(PYARROW_GENERATE_COVERAGE "Build with Cython code coverage enabled" OFF)
-  set(PYARROW_CXXFLAGS "" CACHE STRING "Compiler flags to append when compiling Arrow")
-endif()
-
-find_program(CCACHE_FOUND ccache)
-if(CCACHE_FOUND)
-  set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
-  set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
-endif(CCACHE_FOUND)
-
-#
-# Compiler flags
-#
-
-include(BuildUtils)
-
-# Cython-generated code emits way too many warnings at CHECKIN and EVERYTHING
-set(BUILD_WARNING_LEVEL "PRODUCTION")
-
-# This must be synchronized with the definition in
-# cpp/cmake_modules/DefineOptions.cmake.
-set(ARROW_ARMV8_ARCH
-    "armv8-a"
-    CACHE STRING "Arm64 arch and extensions: armv8-a or armv8-a+crc+crypto")
-include(SetupCxxFlags)
-
-# Add common flags
-set(CMAKE_CXX_FLAGS "${CXX_COMMON_FLAGS} ${CMAKE_CXX_FLAGS}")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${PYARROW_CXXFLAGS}")
-
-if(MSVC)
-  # MSVC version of -Wno-return-type-c-linkage
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4190")
-
-  # Cython generates some bitshift expressions that MSVC does not like in
-  # __Pyx_PyFloat_DivideObjC
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4293")
-
-  # Converting to/from C++ bool is pretty wonky in Cython. The C4800 warning
-  # seems harmless and is probably not worth the effort of working around.
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4800")
-
-  # See https://github.com/cython/cython/issues/2731. Change introduced in
-  # Cython 0.29.1 causes "unsafe use of type 'bool' in operation"
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4804")
-else()
-  # Enable perf and other tools to work properly
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer")
-
-  # Suppress Cython warnings
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-variable -Wno-maybe-uninitialized")
-
-  if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang"
-     OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
-    # Cython warnings in clang
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-parentheses-equality")
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-constant-logical-operand")
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-missing-declarations")
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-sometimes-uninitialized")
-
-    # We have public Cython APIs which return C++ types, which are in an extern
-    # "C" blog (no symbol mangling) and clang doesn't like this
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-return-type-c-linkage")
-  endif()
-endif()
-
-# For any C code, use the same flags.
-set(CMAKE_C_FLAGS "${CMAKE_CXX_FLAGS}")
-
-# Add C++-only flags, like -std=c++11
-set(CMAKE_CXX_FLAGS "${CXX_ONLY_FLAGS} ${CMAKE_CXX_FLAGS}")
-
-if(MSVC)
-  # MSVC makes its own output directories based on the build configuration
-  set(BUILD_SUBDIR_NAME "")
-else()
-  # Set compile output directory
-  string(TOLOWER ${CMAKE_BUILD_TYPE} BUILD_SUBDIR_NAME)
-endif()
-
-# If building in-source, create the "latest" symlink. If building out-of-source,
-# which is preferred, simply output the binaries in the build folder.
-if(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_CURRENT_BINARY_DIR})
-  set(BUILD_OUTPUT_ROOT_DIRECTORY
-      "${CMAKE_CURRENT_BINARY_DIR}/build/${BUILD_SUBDIR_NAME}")
-  # Link build/latest to the current build directory, to avoid developers
-  # accidentally running the latest debug build when in fact they're building
-  # release builds.
-  file(MAKE_DIRECTORY ${BUILD_OUTPUT_ROOT_DIRECTORY})
-  if(NOT APPLE)
-    set(MORE_ARGS "-T")
-  endif()
-  execute_process(COMMAND ln
-                          ${MORE_ARGS}
-                          -sf
-                          ${BUILD_OUTPUT_ROOT_DIRECTORY}
-                          ${CMAKE_CURRENT_BINARY_DIR}/build/latest)
-else()
-  set(BUILD_OUTPUT_ROOT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${BUILD_SUBDIR_NAME}")
-endif()
-
-message(STATUS "Generator: ${CMAKE_GENERATOR}")
-message(STATUS "Build output directory: ${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-# where to put generated archives (.a files)
-set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-set(ARCHIVE_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-# where to put generated libraries (.so files)
-set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-set(LIBRARY_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-# where to put generated binaries
-set(EXECUTABLE_OUTPUT_PATH "${BUILD_OUTPUT_ROOT_DIRECTORY}")
-
-if(PYARROW_USE_TENSORFLOW)
-  # TensorFlow uses the old GLIBCXX ABI, so we have to use it too
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0")
-endif()
-
-# Python and Numpy libraries
-find_package(Python3Alt REQUIRED)
-include(UseCython)
-
-include_directories(SYSTEM ${NUMPY_INCLUDE_DIRS} ${PYTHON_INCLUDE_DIRS} src)
-
-#
-# Dependencies
-#
-
-if(PYARROW_BUILD_FLIGHT)
-  set(ARROW_FLIGHT TRUE)
-endif()
-
-# Arrow
-find_package(ArrowPython REQUIRED)
-include_directories(SYSTEM ${ARROW_INCLUDE_DIR})
-
-function(bundle_arrow_lib library_path)
-  set(options)
-  set(one_value_args SO_VERSION)
-  set(multi_value_args)
-  cmake_parse_arguments(ARG
-                        "${options}"
-                        "${one_value_args}"
-                        "${multi_value_args}"
-                        ${ARGN})
-  if(ARG_UNPARSED_ARGUMENTS)
-    message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}")
-  endif()
-
-  get_filename_component(LIBRARY_DIR ${${library_path}} DIRECTORY)
-  get_filename_component(LIBRARY_NAME ${${library_path}} NAME_WE)
-
-  # Only copy the shared library with ABI version on Linux and macOS
-
-  if(MSVC)
-    configure_file(
-      ${${library_path}}
-      ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}
-      COPYONLY)
-  elseif(APPLE)
-    configure_file(
-      ${LIBRARY_DIR}/${LIBRARY_NAME}.${ARG_SO_VERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}
-      ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME}.${ARG_SO_VERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}
-      COPYONLY)
-  else()
-    configure_file(
-      ${${library_path}}.${ARG_SO_VERSION}
-      ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}.${ARG_SO_VERSION}
-      COPYONLY)
-  endif()
-
-endfunction(bundle_arrow_lib)
-
-function(bundle_arrow_import_lib library_path)
-  get_filename_component(LIBRARY_DIR ${${library_path}} DIRECTORY)
-  get_filename_component(LIBRARY_NAME ${${library_path}} NAME_WE)
-  configure_file(${${library_path}} ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME}.lib
-                 COPYONLY)
-endfunction(bundle_arrow_import_lib)
-
-function(bundle_boost_lib library_path)
-  get_filename_component(LIBRARY_NAME ${${library_path}} NAME)
-  get_filename_component(LIBRARY_NAME_WE ${${library_path}} NAME_WE)
-  configure_file(${${library_path}} ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME}
-                 COPYONLY)
-  set(Boost_SO_VERSION
-      "${Boost_MAJOR_VERSION}.${Boost_MINOR_VERSION}.${Boost_SUBMINOR_VERSION}")
-  if(APPLE)
-    configure_file(
-      ${${library_path}}
-      ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME_WE}${CMAKE_SHARED_LIBRARY_SUFFIX}
-      COPYONLY)
-  else()
-    configure_file(
-      ${${library_path}}
-      ${BUILD_OUTPUT_ROOT_DIRECTORY}/${LIBRARY_NAME_WE}${CMAKE_SHARED_LIBRARY_SUFFIX}.${Boost_SO_VERSION}
-      COPYONLY)
-  endif()
-endfunction()
-
-function(bundle_arrow_dependency library_name)
-  if(MSVC)
-    if(DEFINED ENV{CONDA_PREFIX})
-      file(TO_CMAKE_PATH "$ENV{CONDA_PREFIX}\\Library" SHARED_LIB_HOME)
-    endif()
-  else()
-    if(DEFINED ENV{CONDA_PREFIX})
-      file(TO_CMAKE_PATH "$ENV{CONDA_PREFIX}" SHARED_LIB_HOME)
-    endif()
-  endif()
-  if(DEFINED ENV{${library_name}_HOME})
-    file(TO_CMAKE_PATH "$ENV{${library_name}_HOME}" SHARED_LIB_HOME)
-  endif()
-  arrow_build_shared_library_name(shared_lib_name "${library_name}")
-  unset(SHARED_LIB_PATH CACHE)
-  if(MSVC)
-    set(CMAKE_FIND_LIBRARY_SUFFIXES_ORIGINAL ${CMAKE_FIND_LIBRARY_SUFFIXES})
-    # .dll isn't found by find_library with MSVC because .dll isn't included in
-    # CMAKE_FIND_LIBRARY_SUFFIXES.
-    list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${CMAKE_SHARED_LIBRARY_SUFFIX}")
-  endif()
-  if(SHARED_LIB_HOME)
-    find_library(SHARED_LIB_PATH
-                 NAMES "${shared_lib_name}"
-                 PATHS "${SHARED_LIB_HOME}"
-                 PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
-                 NO_DEFAULT_PATH)
-  else()
-    find_library(SHARED_LIB_PATH
-                 NAMES "${shared_lib_name}"
-                 PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES})
-  endif()
-  if(MSVC)
-    set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_ORIGINAL})
-  endif()
-  if(SHARED_LIB_PATH)
-    get_filename_component(SHARED_LIB_REALPATH ${SHARED_LIB_PATH} REALPATH)
-    get_filename_component(SHARED_LIB_NAME ${SHARED_LIB_PATH} NAME)
-    message(
-      STATUS
-        "Bundle dependency ${library_name}: ${SHARED_LIB_REALPATH} as ${SHARED_LIB_NAME}")
-    configure_file(${SHARED_LIB_REALPATH}
-                   ${BUILD_OUTPUT_ROOT_DIRECTORY}/${SHARED_LIB_NAME} COPYONLY)
-  else()
-    message(FATAL_ERROR "Unable to bundle dependency: ${library_name}")
-  endif()
-endfunction()
-
-# Always bundle includes
-file(COPY ${ARROW_INCLUDE_DIR}/arrow DESTINATION ${BUILD_OUTPUT_ROOT_DIRECTORY}/include)
-
-if(PYARROW_BUNDLE_ARROW_CPP)
-  # arrow
-  bundle_arrow_lib(ARROW_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-  bundle_arrow_lib(ARROW_PYTHON_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-
-  # boost
-  if(PYARROW_BOOST_USE_SHARED AND PYARROW_BUNDLE_BOOST)
-    set(Boost_USE_STATIC_LIBS OFF)
-    set(Boost_USE_MULTITHREADED ON)
-    if(MSVC AND ARROW_USE_STATIC_CRT)
-      set(Boost_USE_STATIC_RUNTIME ON)
-    endif()
-    set(Boost_ADDITIONAL_VERSIONS
-        "1.66.0"
-        "1.66"
-        "1.65.0"
-        "1.65"
-        "1.64.0"
-        "1.64"
-        "1.63.0"
-        "1.63"
-        "1.62.0"
-        "1.61"
-        "1.61.0"
-        "1.62"
-        "1.60.0"
-        "1.60")
-    list(GET Boost_ADDITIONAL_VERSIONS 0 BOOST_LATEST_VERSION)
-    string(REPLACE "." "_" BOOST_LATEST_VERSION_IN_PATH ${BOOST_LATEST_VERSION})
-    if(MSVC)
-      # disable autolinking in boost
-      add_definitions(-DBOOST_ALL_NO_LIB)
-    endif()
-    find_package(Boost COMPONENTS regex REQUIRED)
-    bundle_boost_lib(Boost_REGEX_LIBRARY)
-  endif()
-
-  if(MSVC)
-    # TODO(kszucs): locate msvcp140.dll in a portable fashion and bundle it
-    bundle_arrow_import_lib(ARROW_IMPORT_LIB)
-    bundle_arrow_import_lib(ARROW_PYTHON_IMPORT_LIB)
-  endif()
-endif()
-
-#
-# Subdirectories
-#
-
-if(UNIX)
-  set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
-endif()
-
-set(CYTHON_EXTENSIONS
-    lib
-    _fs
-    _compute
-    _csv
-    _json)
-
-set(LINK_LIBS arrow_shared arrow_python_shared)
-
-if(PYARROW_BUILD_S3)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _s3fs)
-endif()
-
-if(PYARROW_BUILD_HDFS)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _hdfs)
-endif()
-
-if(PYARROW_BUILD_CUDA)
-  # Arrow CUDA
-  find_package(ArrowCUDA REQUIRED)
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    bundle_arrow_lib(ARROW_CUDA_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-    if(MSVC)
-      bundle_arrow_import_lib(ARROW_CUDA_IMPORT_LIB)
-    endif()
-  endif()
-  set(CUDA_LINK_LIBS arrow_cuda_shared)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _cuda)
-endif()
-
-# Dataset
-if(PYARROW_BUILD_DATASET)
-  # Arrow Dataset
-  find_package(ArrowDataset REQUIRED)
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    bundle_arrow_lib(ARROW_DATASET_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-    if(MSVC)
-      bundle_arrow_import_lib(ARROW_DATASET_IMPORT_LIB)
-    endif()
-  endif()
-
-  set(DATASET_LINK_LIBS arrow_dataset_shared)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _dataset)
-endif()
-
-if(PYARROW_BUILD_PARQUET)
-  # Parquet
-  find_package(Parquet REQUIRED)
-
-  include_directories(SYSTEM ${PARQUET_INCLUDE_DIR})
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    file(COPY ${PARQUET_INCLUDE_DIR}/parquet
-         DESTINATION ${BUILD_OUTPUT_ROOT_DIRECTORY}/include)
-  endif()
-
-  if(PYARROW_PARQUET_USE_SHARED)
-    if(PYARROW_BUNDLE_ARROW_CPP)
-      bundle_arrow_lib(PARQUET_SHARED_LIB SO_VERSION ${PARQUET_SO_VERSION})
-      if(MSVC)
-        bundle_arrow_import_lib(PARQUET_IMPORT_LIB)
-      endif()
-    endif()
-    set(PARQUET_LINK_LIBS parquet_shared)
-  else()
-    find_package(Thrift)
-    if(PYARROW_BOOST_USE_SHARED)
-      set(Boost_USE_STATIC_LIBS OFF)
-    else()
-      set(Boost_USE_STATIC_LIBS ON)
-    endif()
-    find_package(Boost COMPONENTS regex REQUIRED)
-    add_thirdparty_lib(boost_regex STATIC_LIB ${Boost_REGEX_LIBRARY_RELEASE})
-    add_thirdparty_lib(thrift STATIC_LIB ${THRIFT_STATIC_LIB})
-    set(PARQUET_LINK_LIBS parquet_static thrift_static boost_regex_static)
-  endif()
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _parquet)
-endif()
-
-# Plasma
-if(PYARROW_BUILD_PLASMA)
-  find_package(Plasma REQUIRED)
-
-  include_directories(SYSTEM ${PLASMA_INCLUDE_DIR})
-
-  file(COPY ${ARROW_INCLUDE_DIR}/plasma
-       DESTINATION ${BUILD_OUTPUT_ROOT_DIRECTORY}/include)
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    bundle_arrow_lib(PLASMA_SHARED_LIB SO_VERSION ${PLASMA_SO_VERSION})
-  endif()
-  set(PLASMA_LINK_LIBS plasma_shared)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _plasma)
-  file(COPY ${PLASMA_STORE_SERVER} DESTINATION ${BUILD_OUTPUT_ROOT_DIRECTORY})
-endif()
-
-if(PYARROW_BUILD_ORC)
-  # ORC
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _orc)
-endif()
-
-# Flight
-if(PYARROW_BUILD_FLIGHT)
-  # Arrow Flight
-  find_package(ArrowPythonFlight REQUIRED)
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    bundle_arrow_lib(ARROW_FLIGHT_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-    bundle_arrow_lib(ARROW_PYTHON_FLIGHT_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-    if(MSVC)
-      bundle_arrow_import_lib(ARROW_FLIGHT_IMPORT_LIB)
-      bundle_arrow_import_lib(ARROW_PYTHON_FLIGHT_IMPORT_LIB)
-      # XXX Hardcoded library names because CMake is too stupid to give us
-      # the shared library paths.
-      # https://gitlab.kitware.com/cmake/cmake/issues/16210
-      # bundle_arrow_dependency(libcrypto-1_1-x64)
-      # bundle_arrow_dependency(libssl-1_1-x64)
-    endif()
-  endif()
-
-  set(FLIGHT_LINK_LIBS arrow_flight_shared arrow_python_flight_shared)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} _flight)
-endif()
-
-# Gandiva
-if(PYARROW_BUILD_GANDIVA)
-  find_package(Gandiva REQUIRED)
-
-  include_directories(SYSTEM ${GANDIVA_INCLUDE_DIR})
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    file(COPY ${GANDIVA_INCLUDE_DIR}/gandiva
-         DESTINATION ${BUILD_OUTPUT_ROOT_DIRECTORY}/include)
-
-    bundle_arrow_lib(GANDIVA_SHARED_LIB SO_VERSION ${ARROW_SO_VERSION})
-
-    if(MSVC)
-      bundle_arrow_import_lib(GANDIVA_IMPORT_LIB)
-    endif()
-  endif()
-
-  set(GANDIVA_LINK_LIBS gandiva_shared)
-  set(CYTHON_EXTENSIONS ${CYTHON_EXTENSIONS} gandiva)
-endif()
-
-#
-# Setup and build Cython modules
-#
-
-if(PYARROW_GENERATE_COVERAGE)
-  set(CYTHON_FLAGS "${CYTHON_FLAGS}" "-Xlinetrace=True")
-endif()
-
-foreach(module ${CYTHON_EXTENSIONS})
-  string(REPLACE "." ";" directories ${module})
-  list(GET directories -1 module_name)
-  list(REMOVE_AT directories -1)
-
-  string(REPLACE "." "/" module_root "${module}")
-  set(module_SRC pyarrow/${module_root}.pyx)
-  set_source_files_properties(${module_SRC} PROPERTIES CYTHON_IS_CXX 1)
-
-  cython_add_module(${module_name} ${module_name}_pyx ${module_name}_output ${module_SRC})
-
-  if(directories)
-    string(REPLACE ";" "/" module_output_directory ${directories})
-    set_target_properties(${module_name}
-                          PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${module_output_directory})
-  endif()
-
-  if(PYARROW_BUNDLE_ARROW_CPP)
-    # In the event that we are bundling the shared libraries (e.g. in a
-    # manylinux1 wheel), we need to set the RPATH of the extensions to the
-    # root of the pyarrow/ package so that libarrow/libarrow_python are able
-    # to be loaded properly
-    if(APPLE)
-      set(module_install_rpath "@loader_path/")
-    else()
-      set(module_install_rpath "\$ORIGIN")
-    endif()
-
-    # XXX(wesm): ARROW-2326 this logic is only needed when we have Cython
-    # modules in interior directories. Since all of our C extensions and
-    # bundled libraries are in the same place, we can skip this part
-
-    # list(LENGTH directories i)
-    # while(${i} GREATER 0)
-    #   set(module_install_rpath "${module_install_rpath}/..")
-    #   math(EXPR i "${i} - 1" )
-    # endwhile(${i} GREATER 0)
-
-    set_target_properties(${module_name} PROPERTIES INSTALL_RPATH ${module_install_rpath})
-  endif()
-
-  if(PYARROW_GENERATE_COVERAGE)
-    set_target_properties(${module_name}
-                          PROPERTIES COMPILE_DEFINITIONS
-                                     "CYTHON_TRACE=1;CYTHON_TRACE_NOGIL=1")
-  endif()
-
-  target_link_libraries(${module_name} PRIVATE ${LINK_LIBS})
-
-  # Generated files will be moved to the right directory by setup.py.
-endforeach(module)
-
-# Additional link libraries
-
-if(PYARROW_BUILD_CUDA)
-  target_link_libraries(_cuda PRIVATE ${CUDA_LINK_LIBS})
-endif()
-
-if(PYARROW_BUILD_FLIGHT)
-  target_link_libraries(_flight PRIVATE ${FLIGHT_LINK_LIBS})
-endif()
-
-if(PYARROW_BUILD_DATASET)
-  target_link_libraries(_dataset PRIVATE ${DATASET_LINK_LIBS})
-endif()
-
-if(PYARROW_BUILD_GANDIVA)
-  target_link_libraries(gandiva PRIVATE ${GANDIVA_LINK_LIBS})
-endif()
-
-if(PYARROW_BUILD_PARQUET)
-  target_link_libraries(_parquet PRIVATE ${PARQUET_LINK_LIBS})
-endif()
-
-if(PYARROW_BUILD_PLASMA)
-  target_link_libraries(_plasma PRIVATE ${PLASMA_LINK_LIBS})
-endif()
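
The foreach loop near the end of the removed CMakeLists.txt maps each dotted
Cython module name to a target name, a .pyx source path under pyarrow/, and
(for nested modules) a library output directory. A Python restatement of
that mapping, assuming the same naming scheme; the helper itself is
hypothetical:

```python
# Restates the module-name handling from the removed foreach loop:
# "_parquet" -> target "_parquet", source "pyarrow/_parquet.pyx";
# a nested "a.b._x" would get output directory "a/b". Illustrative only.

def cython_module_layout(module):
    parts = module.split(".")
    name = parts[-1]                        # cython_add_module target name
    out_dir = "/".join(parts[:-1]) or None  # LIBRARY_OUTPUT_DIRECTORY, if nested
    src = "pyarrow/" + "/".join(parts) + ".pyx"
    return name, src, out_dir

assert cython_module_layout("_parquet") == ("_parquet", "pyarrow/_parquet.pyx", None)
assert cython_module_layout("a.b._x") == ("_x", "pyarrow/a/b/_x.pyx", "a/b")
```
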
diff --git a/python/MANIFEST.in b/python/MANIFEST.in
deleted file mode 100644
index ed7012e..0000000
--- a/python/MANIFEST.in
+++ /dev/null
@@ -1,15 +0,0 @@
-include README.md
-include ../LICENSE.txt
-include ../NOTICE.txt
-
-global-include CMakeLists.txt
-graft pyarrow
-graft cmake_modules
-
-global-exclude *.so
-global-exclude *.pyc
-global-exclude *~
-global-exclude \#*
-global-exclude .git*
-global-exclude .DS_Store
-prune .asv
diff --git a/python/README.md b/python/README.md
deleted file mode 100644
index def98a3..0000000
--- a/python/README.md
+++ /dev/null
@@ -1,59 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-## Python library for Apache Arrow
-
-[![pypi](https://img.shields.io/pypi/v/pyarrow.svg)](https://pypi.org/project/pyarrow/) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow)
-
-This library provides a Python API for functionality provided by the Arrow C++
-libraries, along with tools for Arrow integration and interoperability with
-pandas, NumPy, and other software in the Python ecosystem.
-
-## Installing
-
-Across platforms, you can install a recent version of pyarrow with the conda
-package manager:
-
-```shell
-conda install pyarrow -c conda-forge
-```
-
-On Linux, macOS, and Windows, you can also install binary wheels from PyPI with
-pip:
-
-```shell
-pip install pyarrow
-```
-
-If you encounter any issues importing the pip wheels on Windows, you may need
-to install the [Visual C++ Redistributable for Visual Studio 2015][6].
-
-## Development
-
-See [Python Development][2] in the documentation subproject.
-
-### Building the documentation
-
-See [documentation build instructions][1] in the documentation subproject.
-
-[1]: https://github.com/apache/arrow/blob/master/docs/source/developers/documentation.rst
-[2]: https://github.com/apache/arrow/blob/master/docs/source/developers/python.rst
-[3]: https://github.com/pandas-dev/pandas
-[5]: https://arrow.apache.org/docs/latest/python/benchmarks.html
-[6]: https://www.microsoft.com/en-us/download/details.aspx?id=48145
\ No newline at end of file
diff --git a/python/asv-build.sh b/python/asv-build.sh
deleted file mode 100755
index 7de5ff4..0000000
--- a/python/asv-build.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -e
-
-# ASV doesn't activate its conda environment for us
-if [ -z "$ASV_ENV_DIR" ]; then exit 1; fi
-
-if [ -z "$CONDA_HOME" ]; then
-  echo "Please set \$CONDA_HOME to point to your root conda installation"
-  exit 1;
-fi
-
-eval "$($CONDA_HOME/bin/conda shell.bash hook)"
-
-conda activate $ASV_ENV_DIR
-echo "== Conda Prefix for benchmarks: " $CONDA_PREFIX " =="
-
-# Build Arrow C++ libraries
-export ARROW_HOME=$CONDA_PREFIX
-export PARQUET_HOME=$CONDA_PREFIX
-export ORC_HOME=$CONDA_PREFIX
-export PROTOBUF_HOME=$CONDA_PREFIX
-export BOOST_ROOT=$CONDA_PREFIX
-
-pushd ../cpp
-mkdir -p build
-pushd build
-
-cmake -GNinja \
-      -DCMAKE_BUILD_TYPE=release \
-      -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \
-      -DARROW_CXXFLAGS=$CXXFLAGS \
-      -DARROW_USE_GLOG=off \
-      -DARROW_FLIGHT=on \
-      -DARROW_ORC=on \
-      -DARROW_PARQUET=on \
-      -DARROW_PYTHON=on \
-      -DARROW_PLASMA=on \
-      -DARROW_S3=on \
-      -DARROW_BUILD_TESTS=off \
-      ..
-cmake --build . --target install
-
-popd
-popd
-
-# Build pyarrow wrappers
-export SETUPTOOLS_SCM_PRETEND_VERSION=0.0.1
-export PYARROW_BUILD_TYPE=release
-export PYARROW_PARALLEL=8
-export PYARROW_WITH_FLIGHT=1
-export PYARROW_WITH_ORC=1
-export PYARROW_WITH_PARQUET=1
-export PYARROW_WITH_PLASMA=1
-
-python setup.py clean
-find pyarrow -name "*.so" -delete
-python setup.py develop
diff --git a/python/asv-install.sh b/python/asv-install.sh
deleted file mode 100755
index beef730..0000000
--- a/python/asv-install.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Deliberately empty, but exists so that we don't have to change
-# asv.conf.json if we need specific commands here.
diff --git a/python/asv-uninstall.sh b/python/asv-uninstall.sh
deleted file mode 100755
index beef730..0000000
--- a/python/asv-uninstall.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Deliberately empty, but exists so that we don't have to change
-# asv.conf.json if we need specific commands here.
diff --git a/python/asv.conf.json b/python/asv.conf.json
deleted file mode 100644
index cdb178c..0000000
--- a/python/asv.conf.json
+++ /dev/null
@@ -1,187 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-{
-    // The version of the config file format.  Do not change, unless
-    // you know what you are doing.
-    "version": 1,
-
-    // The name of the project being benchmarked
-    "project": "pyarrow",
-
-    // The project's homepage
-    "project_url": "https://arrow.apache.org/",
-
-    // The URL or local path of the source code repository for the
-    // project being benchmarked
-    "repo": "..",
-
-    // The Python project's subdirectory in your repo.  If missing or
-    // the empty string, the project is assumed to be located at the root
-    // of the repository.
-    "repo_subdir": "python",
-
-    // Custom build commands for Arrow.
-    "build_command": ["/bin/bash {build_dir}/asv-build.sh"],
-    "install_command": ["/bin/bash {build_dir}/asv-install.sh"],
-    "uninstall_command": ["/bin/bash {build_dir}/asv-uninstall.sh"],
-
-    // List of branches to benchmark. If not provided, defaults to "master"
-    // (for git) or "default" (for mercurial).
-    // "branches": ["master"], // for git
-    // "branches": ["default"],    // for mercurial
-
-    // The DVCS being used.  If not set, it will be automatically
-    // determined from "repo" by looking at the protocol in the URL
-    // (if remote), or by looking for special directories, such as
-    // ".git" (if local).
-    "dvcs": "git",
-
-    // The tool to use to create environments.  May be "conda",
-    // "virtualenv" or other value depending on the plugins in use.
-    // If missing or the empty string, the tool will be automatically
-    // determined by looking for tools on the PATH environment
-    // variable.
-    "environment_type": "conda",
-    // Avoid conda-forge to avoid C++ ABI issues
-    "conda_channels": ["defaults"],
-
-    // the base URL to show a commit for the project.
-    "show_commit_url": "https://github.com/apache/arrow/commit/",
-
-    // The Pythons you'd like to test against.  If not provided, defaults
-    // to the current version of Python used to run `asv`.
-    "pythons": ["3.7"],
-
-    // The matrix of dependencies to test.  Each key is the name of a
-    // package (in PyPI) and the values are version numbers.  An empty
-    // list or empty string indicates to just test against the default
-    // (latest) version. null indicates that the package is to not be
-    // installed. If the package to be tested is only available from
-    // PyPI, and the 'environment_type' is conda, then you can preface
-    // the package name by 'pip+', and the package will be installed via
-    // pip (with all the conda available packages installed first,
-    // followed by the pip installed packages).
-    //
-    // "matrix": {
-    //     "numpy": ["1.6", "1.7"],
-    //     "six": ["", null],        // test with and without six installed
-    //     "pip+emcee": [""],   // emcee is only available for install with pip.
-    // },
-    "matrix": {
-        // Use older boost since it works across more revisions of the project
-        "aws-sdk-cpp": [],
-        "boost-cpp": ["1.68.0"],
-        "brotli": [],
-        "cmake": [],
-        "cython": [],
-        "flatbuffers": [],
-        "grpc-cpp": [],
-        "libprotobuf": [],
-        "lz4-c": [],
-        "ninja": [],
-        "numpy": [],
-        "pandas": ["0.25.1"],
-        "pip+setuptools_scm": [],
-        "rapidjson": [],
-        "re2": [],
-        "snappy": [],
-        "thrift-cpp": [],
-        "zstd": [],
-    },
-
-    // Combinations of libraries/python versions can be excluded/included
-    // from the set to test. Each entry is a dictionary containing additional
-    // key-value pairs to include/exclude.
-    //
-    // An exclude entry excludes entries where all values match. The
-    // values are regexps that should match the whole string.
-    //
-    // An include entry adds an environment. Only the packages listed
-    // are installed. The 'python' key is required. The exclude rules
-    // do not apply to includes.
-    //
-    // In addition to package names, the following keys are available:
-    //
-    // - python
-    //     Python version, as in the *pythons* variable above.
-    // - environment_type
-    //     Environment type, as above.
-    // - sys_platform
-    //     Platform, as in sys.platform. Possible values for the common
-    //     cases: 'linux2', 'win32', 'cygwin', 'darwin'.
-    //
-    // "exclude": [
-    //     {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
-    //     {"environment_type": "conda", "six": null}, // don't run without six on conda
-    // ],
-    //
-    // "include": [
-    //     // additional env for python2.7
-    //     {"python": "2.7", "numpy": "1.8"},
-    //     // additional env if run on windows+conda
-    //     {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
-    // ],
-
-    // The directory (relative to the current directory) that benchmarks are
-    // stored in.  If not provided, defaults to "benchmarks"
-    "benchmark_dir": "benchmarks",
-
-    // The directory (relative to the current directory) to cache the Python
-    // environments in.  If not provided, defaults to "env"
-    "env_dir": ".asv/env",
-
-    // The directory (relative to the current directory) that raw benchmark
-    // results are stored in.  If not provided, defaults to "results".
-    "results_dir": ".asv/results",
-
-    // The directory (relative to the current directory) that the html tree
-    // should be written to.  If not provided, defaults to "html".
-    "html_dir": "build/benchmarks/html",
-
-    // The number of characters to retain in the commit hashes.
-    // "hash_length": 8,
-
-    // `asv` will cache wheels of the recent builds in each
-    // environment, making them faster to install next time.  This is
-    // the number of builds to keep, per environment.
-    // "wheel_cache_size": 0,
-
-    // The commits after which the regression search in `asv publish`
-    // should start looking for regressions. Dictionary whose keys are
-    // regexps matching to benchmark names, and values corresponding to
-    // the commit (exclusive) after which to start looking for
-    // regressions.  The default is to start from the first commit
-    // with results. If the commit is `null`, regression detection is
-    // skipped for the matching benchmark.
-    //
-    // "regressions_first_commits": {
-    //    "some_benchmark": "352cdf",  // Consider regressions only after this commit
-    //    "another_benchmark": null,   // Skip regression detection altogether
-    // }
-
-    // The thresholds for relative change in results, after which `asv
-    // publish` starts reporting regressions. Dictionary of the same
-    // form as in ``regressions_first_commits``, with values
-    // indicating the thresholds.  If multiple entries match, the
-    // maximum is taken. If no entry matches, the default is 5%.
-    //
-    // "regressions_thresholds": {
-    //    "some_benchmark": 0.01,     // Threshold of 1%
-    //    "another_benchmark": 0.5,   // Threshold of 50%
-    // }
-}
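
For reference, asv discovers benchmarks in the "benchmark_dir" configured above purely by naming convention: classes whose time_* methods it times, with an optional setup() hook and optional params/param_names attributes. A minimal, hypothetical sketch of the kind of class it picks up (illustrative only, not part of this diff):

    # Hypothetical asv benchmark: asv instantiates the class, calls
    # setup() once per parameter value, then repeatedly times time_*.
    import pyarrow as pa

    class TimeArrayFromList:
        params = [10 ** 3, 10 ** 5]   # one benchmark variant per size
        param_names = ['size']

        def setup(self, size):
            self.data = list(range(size))

        def time_array_from_list(self, size):
            pa.array(self.data, type=pa.int64())
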
diff --git a/python/benchmarks/__init__.py b/python/benchmarks/__init__.py
deleted file mode 100644
index 13a8339..0000000
--- a/python/benchmarks/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/python/benchmarks/array_ops.py b/python/benchmarks/array_ops.py
deleted file mode 100644
index 696b171..0000000
--- a/python/benchmarks/array_ops.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import pyarrow as pa
-
-
-class ScalarAccess(object):
-    n = 10 ** 5
-
-    def setup(self):  # asv's per-benchmark setup hook
-        self._array = pa.array(list(range(self.n)), type=pa.int64())
-        self._array_items = list(self._array)
-
-    def time_getitem(self):
-        for i in range(self.n):
-            self._array[i]
-
-    def time_as_py(self):
-        for item in self._array_items:
-            item.as_py()
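
The two timings above separate index access, which yields an Arrow scalar, from conversion to a Python object. In miniature (a sketch using only stable pyarrow scalar APIs):

    import pyarrow as pa

    arr = pa.array([10, 20, 30], type=pa.int64())
    scalar = arr[1]               # pa.Int64Scalar; no Python int built yet
    assert scalar.as_py() == 20   # the conversion time_as_py() measures
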
diff --git a/python/benchmarks/common.py b/python/benchmarks/common.py
deleted file mode 100644
index 48526a4..0000000
--- a/python/benchmarks/common.py
+++ /dev/null
@@ -1,349 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import codecs
-import decimal
-from functools import partial
-import itertools
-import sys
-import unicodedata
-
-import numpy as np
-
-import pyarrow as pa
-
-
-KILOBYTE = 1 << 10
-MEGABYTE = KILOBYTE * KILOBYTE
-
-DEFAULT_NONE_PROB = 0.3
-
-
-def _multiplicate_sequence(base, target_size):
-    q, r = divmod(target_size, len(base))
-    return [base] * q + [base[:r]]
-
-
-def get_random_bytes(n, seed=42):
-    """
-    Generate a random bytes object of size *n*.
-    Note the result might be compressible.
-    """
-    rnd = np.random.RandomState(seed)
-    # Computing a huge random bytestring can be costly, so we get at most
-    # 100KB and duplicate the result as needed
-    base_size = 100003
-    q, r = divmod(n, base_size)
-    if q == 0:
-        result = rnd.bytes(r)
-    else:
-        base = rnd.bytes(base_size)
-        result = b''.join(_multiplicate_sequence(base, n))
-    assert len(result) == n
-    return result
-
-
-def get_random_ascii(n, seed=42):
-    """
-    Get a random ASCII-only unicode string of size *n*.
-    """
-    arr = np.frombuffer(get_random_bytes(n, seed=seed), dtype=np.int8) & 0x7f
-    result, _ = codecs.ascii_decode(arr)
-    assert isinstance(result, str)
-    assert len(result) == n
-    return result
-
-
-def _random_unicode_letters(n, seed=42):
-    """
-    Generate a string of random unicode letters (slow).
-    """
-    def _get_more_candidates():
-        return rnd.randint(0, sys.maxunicode, size=n).tolist()
-
-    rnd = np.random.RandomState(seed)
-    out = []
-    candidates = []
-
-    while len(out) < n:
-        if not candidates:
-            candidates = _get_more_candidates()
-        ch = chr(candidates.pop())
-        # XXX Do we actually care that the code points are valid?
-        if unicodedata.category(ch)[0] == 'L':
-            out.append(ch)
-    return out
-
-
-_1024_random_unicode_letters = _random_unicode_letters(1024)
-
-
-def get_random_unicode(n, seed=42):
-    """
-    Get a random non-ASCII unicode string of size *n*.
-    """
-    indices = np.frombuffer(get_random_bytes(n * 2, seed=seed),
-                            dtype=np.int16) & 1023
-    unicode_arr = np.array(_1024_random_unicode_letters)[indices]
-
-    result = ''.join(unicode_arr.tolist())
-    assert len(result) == n, (len(result), len(unicode_arr))
-    return result
-
-
-class BuiltinsGenerator(object):
-
-    def __init__(self, seed=42):
-        self.rnd = np.random.RandomState(seed)
-
-    def sprinkle(self, lst, prob, value):
-        """
-        Sprinkle *value* entries in list *lst* with likelihood *prob*.
-        """
-        for i, p in enumerate(self.rnd.random_sample(size=len(lst))):
-            if p < prob:
-                lst[i] = value
-
-    def sprinkle_nones(self, lst, prob):
-        """
-        Sprinkle None entries in list *lst* with likelihood *prob*.
-        """
-        self.sprinkle(lst, prob, None)
-
-    def generate_int_list(self, n, none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of Python ints with *none_prob* probability of
-        an entry being None.
-        """
-        data = list(range(n))
-        self.sprinkle_nones(data, none_prob)
-        return data
-
-    def generate_float_list(self, n, none_prob=DEFAULT_NONE_PROB,
-                            use_nan=False):
-        """
-        Generate a list of Python floats with *none_prob* probability of
-        an entry being None (or NaN if *use_nan* is true).
-        """
-        # Make sure we get Python floats, not np.float64
-        data = list(map(float, self.rnd.uniform(0.0, 1.0, n)))
-        assert len(data) == n
-        self.sprinkle(data, none_prob, value=float('nan') if use_nan else None)
-        return data
-
-    def generate_bool_list(self, n, none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of Python bools with *none_prob* probability of
-        an entry being None.
-        """
-        # Make sure we get Python bools, not np.bool_
-        data = [bool(x >= 0.5) for x in self.rnd.uniform(0.0, 1.0, n)]
-        assert len(data) == n
-        self.sprinkle_nones(data, none_prob)
-        return data
-
-    def generate_decimal_list(self, n, none_prob=DEFAULT_NONE_PROB,
-                              use_nan=False):
-        """
-        Generate a list of Python Decimals with *none_prob* probability of
-        an entry being None (or NaN if *use_nan* is true).
-        """
-        data = [decimal.Decimal('%.9f' % f)
-                for f in self.rnd.uniform(0.0, 1.0, n)]
-        assert len(data) == n
-        self.sprinkle(data, none_prob,
-                      value=decimal.Decimal('nan') if use_nan else None)
-        return data
-
-    def generate_object_list(self, n, none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of generic Python objects with *none_prob*
-        probability of an entry being None.
-        """
-        data = [object() for i in range(n)]
-        self.sprinkle_nones(data, none_prob)
-        return data
-
-    def _generate_varying_sequences(self, random_factory, n, min_size,
-                                    max_size, none_prob):
-        """
-        Generate a list of *n* sequences of varying size between *min_size*
-        and *max_size*, with *none_prob* probability of an entry being None.
-        The base material for each sequence is obtained by calling
-        `random_factory(<some size>)`
-        """
-        base_size = 10000
-        base = random_factory(base_size + max_size)
-        data = []
-        for i in range(n):
-            off = self.rnd.randint(base_size)
-            if min_size == max_size:
-                size = min_size
-            else:
-                size = self.rnd.randint(min_size, max_size + 1)
-            data.append(base[off:off + size])
-        self.sprinkle_nones(data, none_prob)
-        assert len(data) == n
-        return data
-
-    def generate_fixed_binary_list(self, n, size, none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of bytestrings with a fixed *size*.
-        """
-        return self._generate_varying_sequences(get_random_bytes, n,
-                                                size, size, none_prob)
-
-    def generate_varying_binary_list(self, n, min_size, max_size,
-                                     none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of bytestrings with a random size between
-        *min_size* and *max_size*.
-        """
-        return self._generate_varying_sequences(get_random_bytes, n,
-                                                min_size, max_size, none_prob)
-
-    def generate_ascii_string_list(self, n, min_size, max_size,
-                                   none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of ASCII strings with a random size between
-        *min_size* and *max_size*.
-        """
-        return self._generate_varying_sequences(get_random_ascii, n,
-                                                min_size, max_size, none_prob)
-
-    def generate_unicode_string_list(self, n, min_size, max_size,
-                                     none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of unicode strings with a random size between
-        *min_size* and *max_size*.
-        """
-        return self._generate_varying_sequences(get_random_unicode, n,
-                                                min_size, max_size, none_prob)
-
-    def generate_int_list_list(self, n, min_size, max_size,
-                               none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of lists of Python ints with a random size between
-        *min_size* and *max_size*.
-        """
-        return self._generate_varying_sequences(
-            partial(self.generate_int_list, none_prob=none_prob),
-            n, min_size, max_size, none_prob)
-
-    def generate_tuple_list(self, n, none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of tuples with random values.
-        Each tuple has the form `(int value, float value, bool value)`
-        """
-        dicts = self.generate_dict_list(n, none_prob=none_prob)
-        tuples = [(d.get('u'), d.get('v'), d.get('w'))
-                  if d is not None else None
-                  for d in dicts]
-        assert len(tuples) == n
-        return tuples
-
-    def generate_dict_list(self, n, none_prob=DEFAULT_NONE_PROB):
-        """
-        Generate a list of dicts with random values.
-        Each dict has the form
-
-            `{'u': int value, 'v': float value, 'w': bool value}`
-        """
-        ints = self.generate_int_list(n, none_prob=none_prob)
-        floats = self.generate_float_list(n, none_prob=none_prob)
-        bools = self.generate_bool_list(n, none_prob=none_prob)
-        dicts = []
-        # Keep half the Nones, omit the other half
-        keep_nones = itertools.cycle([True, False])
-        for u, v, w in zip(ints, floats, bools):
-            d = {}
-            if u is not None or next(keep_nones):
-                d['u'] = u
-            if v is not None or next(keep_nones):
-                d['v'] = v
-            if w is not None or next(keep_nones):
-                d['w'] = w
-            dicts.append(d)
-        self.sprinkle_nones(dicts, none_prob)
-        assert len(dicts) == n
-        return dicts
-
-    def get_type_and_builtins(self, n, type_name):
-        """
-        Return a `(arrow type, list)` tuple where the arrow type
-        corresponds to the given logical *type_name*, and the list
-        is a list of *n* random-generated Python objects compatible
-        with the arrow type.
-        """
-        size = None
-
-        if type_name in ('bool', 'decimal', 'ascii', 'unicode', 'int64 list'):
-            kind = type_name
-        elif type_name.startswith(('int', 'uint')):
-            kind = 'int'
-        elif type_name.startswith('float'):
-            kind = 'float'
-        elif type_name.startswith('struct'):
-            kind = type_name  # 'struct' or 'struct from tuples'
-        elif type_name == 'binary':
-            kind = 'varying binary'
-        elif type_name.startswith('binary'):
-            kind = 'fixed binary'
-            size = int(type_name[6:])
-            assert size > 0
-        else:
-            raise ValueError("unrecognized type %r" % (type_name,))
-
-        if kind in ('int', 'float'):
-            ty = getattr(pa, type_name)()
-        elif kind == 'bool':
-            ty = pa.bool_()
-        elif kind == 'decimal':
-            ty = pa.decimal128(9, 9)
-        elif kind == 'fixed binary':
-            ty = pa.binary(size)
-        elif kind == 'varying binary':
-            ty = pa.binary()
-        elif kind in ('ascii', 'unicode'):
-            ty = pa.string()
-        elif kind == 'int64 list':
-            ty = pa.list_(pa.int64())
-        elif kind.startswith('struct'):
-            ty = pa.struct([pa.field('u', pa.int64()),
-                            pa.field('v', pa.float64()),
-                            pa.field('w', pa.bool_())])
-
-        factories = {
-            'int': self.generate_int_list,
-            'float': self.generate_float_list,
-            'bool': self.generate_bool_list,
-            'decimal': self.generate_decimal_list,
-            'fixed binary': partial(self.generate_fixed_binary_list,
-                                    size=size),
-            'varying binary': partial(self.generate_varying_binary_list,
-                                      min_size=3, max_size=40),
-            'ascii': partial(self.generate_ascii_string_list,
-                             min_size=3, max_size=40),
-            'unicode': partial(self.generate_unicode_string_list,
-                               min_size=3, max_size=40),
-            'int64 list': partial(self.generate_int_list_list,
-                                  min_size=0, max_size=20),
-            'struct': self.generate_dict_list,
-            'struct from tuples': self.generate_tuple_list,
-        }
-        data = factories[kind](n)
-        return ty, data
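
Pulling this module together, a sketch of how the benchmark files below consumed it (illustrative only; BuiltinsGenerator is the class defined above and must be in scope):

    import pyarrow as pa

    gen = BuiltinsGenerator(seed=42)   # from the deleted benchmarks/common.py
    ty, data = gen.get_type_and_builtins(1000, 'int64')
    arr = pa.array(data, type=ty)      # Nones (~30% by default) become nulls
    assert arr.null_count > 0
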
diff --git a/python/benchmarks/convert_builtins.py b/python/benchmarks/convert_builtins.py
deleted file mode 100644
index 48a38fa..0000000
--- a/python/benchmarks/convert_builtins.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import pyarrow as pa
-
-from . import common
-
-
-# TODO:
-# - test dates and times
-
-
-class ConvertPyListToArray(object):
-    """
-    Benchmark pa.array(list of values, type=...)
-    """
-    size = 10 ** 5
-    types = ('int32', 'uint32', 'int64', 'uint64',
-             'float32', 'float64', 'bool', 'decimal',
-             'binary', 'binary10', 'ascii', 'unicode',
-             'int64 list', 'struct', 'struct from tuples')
-
-    param_names = ['type']
-    params = [types]
-
-    def setup(self, type_name):
-        gen = common.BuiltinsGenerator()
-        self.ty, self.data = gen.get_type_and_builtins(self.size, type_name)
-
-    def time_convert(self, *args):
-        pa.array(self.data, type=self.ty)
-
-
-class InferPyListToArray(object):
-    """
-    Benchmark pa.array(list of values) with type inference
-    """
-    size = 10 ** 5
-    types = ('int64', 'float64', 'bool', 'decimal', 'binary', 'ascii',
-             'unicode', 'int64 list', 'struct')
-
-    param_names = ['type']
-    params = [types]
-
-    def setup(self, type_name):
-        gen = common.BuiltinsGenerator()
-        self.ty, self.data = gen.get_type_and_builtins(self.size, type_name)
-
-    def time_infer(self, *args):
-        arr = pa.array(self.data)
-        assert arr.type == self.ty
-
-
-class ConvertArrayToPyList(object):
-    """
-    Benchmark pa.array.to_pylist()
-    """
-    size = 10 ** 5
-    types = ('int32', 'uint32', 'int64', 'uint64',
-             'float32', 'float64', 'bool', 'decimal',
-             'binary', 'binary10', 'ascii', 'unicode',
-             'int64 list', 'struct')
-
-    param_names = ['type']
-    params = [types]
-
-    def setup(self, type_name):
-        gen = common.BuiltinsGenerator()
-        self.ty, self.data = gen.get_type_and_builtins(self.size, type_name)
-        self.arr = pa.array(self.data, type=self.ty)
-
-    def time_convert(self, *args):
-        self.arr.to_pylist()
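
The three classes above time the same conversion with and without an explicit type, plus the reverse direction; the distinction in miniature:

    import pyarrow as pa

    data = [1, 2, None, 4]
    a = pa.array(data, type=pa.int64())   # ConvertPyListToArray: type given
    b = pa.array(data)                    # InferPyListToArray: type inferred
    assert b.type == pa.int64()
    assert a.to_pylist() == data          # ConvertArrayToPyList: back to Python
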
diff --git a/python/benchmarks/convert_pandas.py b/python/benchmarks/convert_pandas.py
deleted file mode 100644
index 9cf6bde..0000000
--- a/python/benchmarks/convert_pandas.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import numpy as np
-import pandas as pd
-import pandas.util.testing as tm
-
-import pyarrow as pa
-
-
-class PandasConversionsBase(object):
-    def setup(self, n, dtype):
-        if dtype == 'float64_nans':
-            arr = np.arange(n).astype('float64')
-            arr[arr % 10 == 0] = np.nan
-        else:
-            arr = np.arange(n).astype(dtype)
-        self.data = pd.DataFrame({'column': arr})
-
-
-class PandasConversionsToArrow(PandasConversionsBase):
-    param_names = ('size', 'dtype')
-    params = ((10, 10 ** 6), ('int64', 'float64', 'float64_nans', 'str'))
-
-    def time_from_series(self, n, dtype):
-        pa.Table.from_pandas(self.data)
-
-
-class PandasConversionsFromArrow(PandasConversionsBase):
-    param_names = ('size', 'dtype')
-    params = ((10, 10 ** 6), ('int64', 'float64', 'float64_nans', 'str'))
-
-    def setup(self, n, dtype):
-        super(PandasConversionsFromArrow, self).setup(n, dtype)
-        self.arrow_data = pa.Table.from_pandas(self.data)
-
-    def time_to_series(self, n, dtype):
-        self.arrow_data.to_pandas()
-
-
-class ToPandasStrings(object):
-
-    param_names = ('uniqueness', 'total')
-    params = ((0.001, 0.01, 0.1, 0.5), (1000000,))
-    string_length = 25
-
-    def setup(self, uniqueness, total):
-        nunique = int(total * uniqueness)
-        unique_values = [tm.rands(self.string_length) for i in range(nunique)]
-        values = unique_values * (total // nunique)
-        self.arr = pa.array(values, type=pa.string())
-        self.table = pa.Table.from_arrays([self.arr], ['f0'])
-
-    def time_to_pandas_dedup(self, *args):
-        self.arr.to_pandas()
-
-    def time_to_pandas_no_dedup(self, *args):
-        self.arr.to_pandas(deduplicate_objects=False)
-
-
-class ZeroCopyPandasRead(object):
-
-    def setup(self):
-        # Transpose to make column-major
-        values = np.random.randn(10, 100000)
-
-        df = pd.DataFrame(values.T)
-        ctx = pa.default_serialization_context()
-
-        self.serialized = ctx.serialize(df)
-        self.as_buffer = self.serialized.to_buffer()
-        self.as_components = self.serialized.to_components()
-
-    def time_deserialize_from_buffer(self):
-        pa.deserialize(self.as_buffer)
-
-    def time_deserialize_from_components(self):
-        pa.deserialize_components(self.as_components)
-
-
-class SerializeDeserializePandas(object):
-
-    def setup(self):
-        # 10 million length
-        n = 10000000
-        self.df = pd.DataFrame({'data': np.random.randn(n)})
-        self.serialized = pa.serialize_pandas(self.df)
-
-    def time_serialize_pandas(self):
-        pa.serialize_pandas(self.df)
-
-    def time_deserialize_pandas(self):
-        pa.deserialize_pandas(self.serialized)
-
-
-class TableFromPandasMicroperformance(object):
-    # ARROW-4629
-
-    def setup(self):
-        ser = pd.Series(range(10000))
-        df = pd.DataFrame({col: ser.copy(deep=True) for col in range(100)})
-        # Simulate a real dataset by converting some columns to strings
-        self.df = df.astype({col: str for col in range(50)})
-
-    def time_Table_from_pandas(self):
-        for _ in range(50):
-            pa.Table.from_pandas(self.df, nthreads=1)
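
For context, the core round trip these classes measure is sketched below; note that the pa.serialize()/pa.deserialize() calls used by ZeroCopyPandasRead were deprecated in pyarrow 2.0, so the sketch sticks to the Table path:

    import numpy as np
    import pandas as pd
    import pyarrow as pa

    df = pd.DataFrame({'column': np.arange(1000, dtype='float64')})
    table = pa.Table.from_pandas(df)   # pandas -> Arrow (time_from_series)
    df2 = table.to_pandas()            # Arrow -> pandas (time_to_series)
    assert df2['column'].equals(df['column'])
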
diff --git a/python/benchmarks/io.py b/python/benchmarks/io.py
deleted file mode 100644
index 01a9acb..0000000
--- a/python/benchmarks/io.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import time
-import pyarrow as pa
-
-
-class HighLatencyReader(object):
-
-    def __init__(self, raw, latency):
-        self.raw = raw
-        self.latency = latency
-
-    def close(self):
-        self.raw.close()
-
-    @property
-    def closed(self):
-        return self.raw.closed
-
-    def read(self, nbytes=None):
-        time.sleep(self.latency)
-        return self.raw.read(nbytes)
-
-
-class HighLatencyWriter(object):
-
-    def __init__(self, raw, latency):
-        self.raw = raw
-        self.latency = latency
-
-    def close(self):
-        self.raw.close()
-
-    @property
-    def closed(self):
-        return self.raw.closed
-
-    def write(self, data):
-        time.sleep(self.latency)
-        self.raw.write(data)
-
-
-class BufferedIOHighLatency(object):
-    """Benchmark creating a parquet manifest."""
-
-    increment = 1024
-    total_size = 16 * (1 << 20)  # 16 MB
-    buffer_size = 1 << 20  # 1 MB
-    latency = 0.1  # 100ms
-
-    param_names = ('latency',)
-    params = [0, 0.01, 0.1]
-
-    def time_buffered_writes(self, latency):
-        test_data = b'x' * self.increment
-        bytes_written = 0
-        out = pa.BufferOutputStream()
-        slow_out = HighLatencyWriter(out, latency)
-        buffered_out = pa.output_stream(slow_out, buffer_size=self.buffer_size)
-
-        while bytes_written < self.total_size:
-            buffered_out.write(test_data)
-            bytes_written += self.increment
-        buffered_out.flush()
-
-    def time_buffered_reads(self, latency):
-        bytes_read = 0
-        reader = pa.input_stream(pa.py_buffer(b'x' * self.total_size))
-        slow_reader = HighLatencyReader(reader, latency)
-        buffered_reader = pa.input_stream(slow_reader,
-                                          buffer_size=self.buffer_size)
-        while bytes_read < self.total_size:
-            buffered_reader.read(self.increment)
-            bytes_read += self.increment
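
What the buffering buys: pa.output_stream() and pa.input_stream() can wrap any file-like object and coalesce small writes/reads into buffer_size chunks, so the slow wrapper above is hit far less often. A minimal sketch of the write side:

    import pyarrow as pa

    sink = pa.BufferOutputStream()
    buffered = pa.output_stream(sink, buffer_size=1 << 20)  # 1 MB buffer
    for _ in range(1024):
        buffered.write(b'x' * 1024)   # 1 KB writes, mostly absorbed in memory
    buffered.flush()
    assert sink.getvalue().size == 1024 * 1024
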
diff --git a/python/benchmarks/microbenchmarks.py b/python/benchmarks/microbenchmarks.py
deleted file mode 100644
index f8ba383..0000000
--- a/python/benchmarks/microbenchmarks.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import pyarrow.benchmark as pb
-
-from . import common
-
-
-class PandasObjectIsNull(object):
-    size = 10 ** 5
-    types = ('int', 'float', 'object', 'decimal')
-
-    param_names = ['type']
-    params = [types]
-
-    def setup(self, type_name):
-        gen = common.BuiltinsGenerator()
-        if type_name == 'int':
-            lst = gen.generate_int_list(self.size)
-        elif type_name == 'float':
-            lst = gen.generate_float_list(self.size, use_nan=True)
-        elif type_name == 'object':
-            lst = gen.generate_object_list(self.size)
-        elif type_name == 'decimal':
-            lst = gen.generate_decimal_list(self.size)
-        else:
-            assert 0
-        self.lst = lst
-
-    def time_PandasObjectIsNull(self, *args):
-        pb.benchmark_PandasObjectIsNull(self.lst)
diff --git a/python/benchmarks/parquet.py b/python/benchmarks/parquet.py
deleted file mode 100644
index 3aeca42..0000000
--- a/python/benchmarks/parquet.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import shutil
-import tempfile
-
-from pandas.util.testing import rands
-import numpy as np
-import pandas as pd
-
-import pyarrow as pa
-try:
-    import pyarrow.parquet as pq
-except ImportError:
-    pq = None
-
-
-class ParquetManifestCreation(object):
-    """Benchmark creating a parquet manifest."""
-
-    size = 10 ** 6
-    tmpdir = None
-
-    param_names = ('num_partitions', 'num_threads')
-    params = [(10, 100, 1000), (1, 8)]
-
-    def setup(self, num_partitions, num_threads):
-        if pq is None:
-            raise NotImplementedError("Parquet support not enabled")
-
-        self.tmpdir = tempfile.mkdtemp('benchmark_parquet')
-        rnd = np.random.RandomState(42)
-        num1 = rnd.randint(0, num_partitions, size=self.size)
-        num2 = rnd.randint(0, 1000, size=self.size)
-        output_df = pd.DataFrame({'num1': num1, 'num2': num2})
-        output_table = pa.Table.from_pandas(output_df)
-        pq.write_to_dataset(output_table, self.tmpdir, ['num1'])
-
-    def teardown(self, num_partitions, num_threads):
-        if self.tmpdir is not None:
-            shutil.rmtree(self.tmpdir)
-
-    def time_manifest_creation(self, num_partitions, num_threads):
-        pq.ParquetManifest(self.tmpdir, metadata_nthreads=num_threads)
-
-
-class ParquetWriteBinary(object):
-
-    def setup(self):
-        nuniques = 100000
-        value_size = 50
-        length = 1000000
-        num_cols = 10
-
-        unique_values = np.array([rands(value_size) for
-                                  i in range(nuniques)], dtype='O')
-        values = unique_values[np.random.randint(0, nuniques, size=length)]
-        self.table = pa.table([pa.array(values) for i in range(num_cols)],
-                              names=['f{}'.format(i) for i in range(num_cols)])
-        self.table_df = self.table.to_pandas()
-
-    def time_write_binary_table(self):
-        out = pa.BufferOutputStream()
-        pq.write_table(self.table, out)
-
-    def time_write_binary_table_uncompressed(self):
-        out = pa.BufferOutputStream()
-        pq.write_table(self.table, out, compression='none')
-
-    def time_write_binary_table_no_dictionary(self):
-        out = pa.BufferOutputStream()
-        pq.write_table(self.table, out, use_dictionary=False)
-
-    def time_convert_pandas_and_write_binary_table(self):
-        out = pa.BufferOutputStream()
-        pq.write_table(pa.table(self.table_df), out)
-
-
-def generate_dict_strings(string_size, nunique, length, random_order=True):
-    uniques = np.array([rands(string_size) for i in range(nunique)], dtype='O')
-    if random_order:
-        indices = np.random.randint(0, nunique, size=length).astype('i4')
-    else:
-        indices = np.arange(nunique).astype('i4').repeat(length // nunique)
-    return pa.DictionaryArray.from_arrays(indices, uniques)
-
-
-def generate_dict_table(num_cols, string_size, nunique, length,
-                        random_order=True):
-    data = generate_dict_strings(string_size, nunique, length,
-                                 random_order=random_order)
-    return pa.table([
-        data for i in range(num_cols)
-    ], names=['f{}'.format(i) for i in range(num_cols)])
-
-
-class ParquetWriteDictionaries(object):
-
-    param_names = ('nunique',)
-    params = [1000, 100000]
-
-    def setup(self, nunique):
-        self.num_cols = 10
-        self.value_size = 32
-        self.nunique = nunique
-        self.length = 10000000
-
-        self.table = generate_dict_table(self.num_cols, self.value_size,
-                                         self.nunique, self.length)
-        self.table_sequential = generate_dict_table(self.num_cols,
-                                                    self.value_size,
-                                                    self.nunique, self.length,
-                                                    random_order=False)
-
-    def time_write_random_order(self, nunique):
-        pq.write_table(self.table, pa.BufferOutputStream())
-
-    def time_write_sequential(self, nunique):
-        pq.write_table(self.table_sequential, pa.BufferOutputStream())
-
-
-class ParquetManyColumns(object):
-
-    total_cells = 10000000
-    param_names = ('num_cols',)
-    params = [100, 1000, 10000]
-
-    def setup(self, num_cols):
-        num_rows = self.total_cells // num_cols
-        self.table = pa.table({'c' + str(i): np.random.randn(num_rows)
-                               for i in range(num_cols)})
-
-        out = pa.BufferOutputStream()
-        pq.write_table(self.table, out)
-        self.buf = out.getvalue()
-
-    def time_write(self, num_cols):
-        out = pa.BufferOutputStream()
-        pq.write_table(self.table, out)
-
-    def time_read(self, num_cols):
-        pq.read_table(self.buf)
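
The write/read cycle that all of these parquet benchmarks exercise, in minimal form (the compression=, use_dictionary=, and column-count knobs above vary this same pattern):

    import pyarrow as pa
    import pyarrow.parquet as pq

    table = pa.table({'c0': list(range(100))})
    sink = pa.BufferOutputStream()
    pq.write_table(table, sink)
    result = pq.read_table(pa.BufferReader(sink.getvalue()))
    assert result.equals(table)
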
diff --git a/python/benchmarks/plasma.py b/python/benchmarks/plasma.py
deleted file mode 100644
index 90a2845..0000000
--- a/python/benchmarks/plasma.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import numpy as np
-import timeit
-
-try:
-    import pyarrow.plasma as plasma
-except ImportError:
-    # TODO(wesm): These are not asv benchmarks, so we can just fail
-    # silently here
-    pass
-
-
-class SimplePlasmaThroughput(object):
-    """Benchmark plasma store throughput with a single client."""
-
-    params = [1000, 100000, 10000000]
-
-    timer = timeit.default_timer
-
-    def setup(self, size):
-        self.plasma_store_ctx = plasma.start_plasma_store(
-            plasma_store_memory=10**9)
-        plasma_store_name, p = self.plasma_store_ctx.__enter__()
-        self.plasma_client = plasma.connect(plasma_store_name)
-
-        self.data = np.random.randn(size // 8)
-
-    def teardown(self, size):
-        self.plasma_store_ctx.__exit__(None, None, None)
-
-    def time_plasma_put_data(self, size):
-        self.plasma_client.put(self.data)
-
-
-class SimplePlasmaLatency(object):
-    """Benchmark plasma store latency with a single client."""
-
-    timer = timeit.default_timer
-
-    def setup(self):
-        self.plasma_store_ctx = plasma.start_plasma_store(
-            plasma_store_memory=10**9)
-        plasma_store_name, p = self.plasma_store_ctx.__enter__()
-        self.plasma_client = plasma.connect(plasma_store_name)
-
-    def teardown(self):
-        self.plasma_store_ctx.__exit__(None, None, None)
-
-    def time_plasma_put(self):
-        for i in range(1000):
-            self.plasma_client.put(1)
-
-    def time_plasma_putget(self):
-        for i in range(1000):
-            x = self.plasma_client.put(1)
-            self.plasma_client.get(x)
diff --git a/python/benchmarks/streaming.py b/python/benchmarks/streaming.py
deleted file mode 100644
index c0c63e6..0000000
--- a/python/benchmarks/streaming.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import numpy as np
-import pandas as pd
-import pyarrow as pa
-
-from . import common
-from .common import KILOBYTE, MEGABYTE
-
-
-def generate_chunks(total_size, nchunks, ncols, dtype=np.dtype('int64')):
-    rowsize = total_size // nchunks // ncols
-    assert rowsize % dtype.itemsize == 0
-
-    def make_column(col, chunk):
-        return np.frombuffer(common.get_random_bytes(
-            rowsize, seed=col + 997 * chunk)).view(dtype)
-
-    return [pd.DataFrame({
-            'c' + str(col): make_column(col, chunk)
-            for col in range(ncols)})
-            for chunk in range(nchunks)]
-
-
-class StreamReader(object):
-    """
-    Benchmark in-memory streaming to a Pandas dataframe.
-    """
-    total_size = 64 * MEGABYTE
-    ncols = 8
-    chunk_sizes = [16 * KILOBYTE, 256 * KILOBYTE, 8 * MEGABYTE]
-
-    param_names = ['chunk_size']
-    params = [chunk_sizes]
-
-    def setup(self, chunk_size):
-        # Note we're careful to stream different chunks instead of
-        # streaming N times the same chunk, so that we avoid operating
-        # entirely out of L1/L2.
-        chunks = generate_chunks(self.total_size,
-                                 nchunks=self.total_size // chunk_size,
-                                 ncols=self.ncols)
-        batches = [pa.RecordBatch.from_pandas(df)
-                   for df in chunks]
-        schema = batches[0].schema
-        sink = pa.BufferOutputStream()
-        stream_writer = pa.RecordBatchStreamWriter(sink, schema)
-        for batch in batches:
-            stream_writer.write_batch(batch)
-        self.source = sink.getvalue()
-
-    def time_read_to_dataframe(self, *args):
-        reader = pa.RecordBatchStreamReader(self.source)
-        table = reader.read_all()
-        df = table.to_pandas()  # noqa
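
The stream-format round trip being timed, reduced to a single batch (the same writer/reader classes used in setup above):

    import pyarrow as pa

    batch = pa.RecordBatch.from_arrays([pa.array([1, 2, 3])], names=['c0'])
    sink = pa.BufferOutputStream()
    writer = pa.RecordBatchStreamWriter(sink, batch.schema)
    writer.write_batch(batch)
    writer.close()
    reader = pa.RecordBatchStreamReader(sink.getvalue())
    df = reader.read_all().to_pandas()
    assert len(df) == 3
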
diff --git a/python/cmake_modules b/python/cmake_modules
deleted file mode 120000
index 76e2a8d..0000000
--- a/python/cmake_modules
+++ /dev/null
@@ -1 +0,0 @@
-../cpp/cmake_modules
\ No newline at end of file
diff --git a/python/examples/flight/client.py b/python/examples/flight/client.py
deleted file mode 100644
index ed6ce54..0000000
--- a/python/examples/flight/client.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-"""An example Flight CLI client."""
-
-import argparse
-import sys
-
-import pyarrow
-import pyarrow.flight
-import pyarrow.csv as csv
-
-
-def list_flights(args, client, connection_args={}):
-    print('Flights\n=======')
-    for flight in client.list_flights():
-        descriptor = flight.descriptor
-        if descriptor.descriptor_type == pyarrow.flight.DescriptorType.PATH:
-            print("Path:", descriptor.path)
-        elif descriptor.descriptor_type == pyarrow.flight.DescriptorType.CMD:
-            print("Command:", descriptor.command)
-        else:
-            print("Unknown descriptor type")
-
-        print("Total records:", end=" ")
-        if flight.total_records >= 0:
-            print(flight.total_records)
-        else:
-            print("Unknown")
-
-        print("Total bytes:", end=" ")
-        if flight.total_bytes >= 0:
-            print(flight.total_bytes)
-        else:
-            print("Unknown")
-
-        print("Number of endpoints:", len(flight.endpoints))
-        print("Schema:")
-        print(flight.schema)
-        print('---')
-
-    print('\nActions\n=======')
-    for action in client.list_actions():
-        print("Type:", action.type)
-        print("Description:", action.description)
-        print('---')
-
-
-def do_action(args, client, connection_args={}):
-    try:
-        buf = pyarrow.allocate_buffer(0)
-        action = pyarrow.flight.Action(args.action_type, buf)
-        print('Running action', args.action_type)
-        for result in client.do_action(action):
-            print("Got result", result.body.to_pybytes())
-    except pyarrow.lib.ArrowIOError as e:
-        print("Error calling action:", e)
-
-
-def push_data(args, client, connection_args={}):
-    print('File Name:', args.file)
-    my_table = csv.read_csv(args.file)
-    print('Table rows=', str(len(my_table)))
-    df = my_table.to_pandas()
-    print(df.head())
-    writer, _ = client.do_put(
-        pyarrow.flight.FlightDescriptor.for_path(args.file), my_table.schema)
-    writer.write_table(my_table)
-    writer.close()
-
-
-def get_flight(args, client, connection_args={}):
-    if args.path:
-        descriptor = pyarrow.flight.FlightDescriptor.for_path(*args.path)
-    else:
-        descriptor = pyarrow.flight.FlightDescriptor.for_command(args.command)
-
-    info = client.get_flight_info(descriptor)
-    for endpoint in info.endpoints:
-        print('Ticket:', endpoint.ticket)
-        for location in endpoint.locations:
-            print(location)
-            get_client = pyarrow.flight.FlightClient(location,
-                                                     **connection_args)
-            reader = get_client.do_get(endpoint.ticket)
-            df = reader.read_pandas()
-            print(df)
-
-
-def _add_common_arguments(parser):
-    parser.add_argument('--tls', action='store_true',
-                        help='Enable transport-level security')
-    parser.add_argument('--tls-roots', default=None,
-                        help='Path to trusted TLS certificate(s)')
-    parser.add_argument("--mtls", nargs=2, default=None,
-                        metavar=('CERTFILE', 'KEYFILE'),
-                        help="Enable transport-level security")
-    parser.add_argument('host', type=str,
-                        help="Address or hostname to connect to")
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    subcommands = parser.add_subparsers()
-
-    cmd_list = subcommands.add_parser('list')
-    cmd_list.set_defaults(action='list')
-    _add_common_arguments(cmd_list)
-    cmd_list.add_argument('-l', '--list', action='store_true',
-                          help="Print more details.")
-
-    cmd_do = subcommands.add_parser('do')
-    cmd_do.set_defaults(action='do')
-    _add_common_arguments(cmd_do)
-    cmd_do.add_argument('action_type', type=str,
-                        help="The action type to run.")
-
-    cmd_put = subcommands.add_parser('put')
-    cmd_put.set_defaults(action='put')
-    _add_common_arguments(cmd_put)
-    cmd_put.add_argument('file', type=str,
-                         help="CSV file to upload.")
-
-    cmd_get = subcommands.add_parser('get')
-    cmd_get.set_defaults(action='get')
-    _add_common_arguments(cmd_get)
-    cmd_get_descriptor = cmd_get.add_mutually_exclusive_group(required=True)
-    cmd_get_descriptor.add_argument('-p', '--path', type=str, action='append',
-                                    help="The path for the descriptor.")
-    cmd_get_descriptor.add_argument('-c', '--command', type=str,
-                                    help="The command for the descriptor.")
-
-    args = parser.parse_args()
-    if not hasattr(args, 'action'):
-        parser.print_help()
-        sys.exit(1)
-
-    commands = {
-        'list': list_flights,
-        'do': do_action,
-        'get': get_flight,
-        'put': push_data,
-    }
-    host, port = args.host.split(':')
-    port = int(port)
-    scheme = "grpc+tcp"
-    connection_args = {}
-    if args.tls:
-        scheme = "grpc+tls"
-        if args.tls_roots:
-            with open(args.tls_roots, "rb") as root_certs:
-                connection_args["tls_root_certs"] = root_certs.read()
-    if args.mtls:
-        with open(args.mtls[0], "rb") as cert_file:
-            tls_cert_chain = cert_file.read()
-        with open(args.mtls[1], "rb") as key_file:
-            tls_private_key = key_file.read()
-        connection_args["cert_chain"] = tls_cert_chain
-        connection_args["private_key"] = tls_private_key
-    client = pyarrow.flight.FlightClient(f"{scheme}://{host}:{port}",
-                                         **connection_args)
-    while True:
-        try:
-            action = pyarrow.flight.Action("healthcheck", b"")
-            options = pyarrow.flight.FlightCallOptions(timeout=1)
-            list(client.do_action(action, options=options))
-            break
-        except pyarrow.ArrowIOError as e:
-            if "Deadline" in str(e):
-                print("Server is not ready, waiting...")
-    commands[args.action](args, client, connection_args)
-
-
-if __name__ == '__main__':
-    main()
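
Stripped of argument parsing and TLS handling, the exchange this client drives is roughly the following sketch (it assumes a Flight server such as the one removed further below, listening on localhost:5005):

    import pyarrow as pa
    import pyarrow.flight

    client = pyarrow.flight.FlightClient("grpc+tcp://localhost:5005")
    table = pa.table({'a': [1, 2, 3]})
    descriptor = pyarrow.flight.FlightDescriptor.for_path("example")
    writer, _ = client.do_put(descriptor, table.schema)   # the 'put' command
    writer.write_table(table)
    writer.close()
    info = client.get_flight_info(descriptor)             # the 'get' command
    reader = client.do_get(info.endpoints[0].ticket)
    print(reader.read_pandas())
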
diff --git a/python/examples/flight/middleware.py b/python/examples/flight/middleware.py
deleted file mode 100644
index 2056bae..0000000
--- a/python/examples/flight/middleware.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-"""Example of invisibly propagating a request ID with middleware."""
-
-import argparse
-import sys
-import threading
-import uuid
-
-import pyarrow as pa
-import pyarrow.flight as flight
-
-
-class TraceContext:
-    _locals = threading.local()
-    _locals.trace_id = None
-
-    @classmethod
-    def current_trace_id(cls):
-        if not getattr(cls._locals, "trace_id", None):
-            cls.set_trace_id(uuid.uuid4().hex)
-        return cls._locals.trace_id
-
-    @classmethod
-    def set_trace_id(cls, trace_id):
-        cls._locals.trace_id = trace_id
-
-
-TRACE_HEADER = "x-tracing-id"
-
-
-class TracingServerMiddleware(flight.ServerMiddleware):
-    def __init__(self, trace_id):
-        self.trace_id = trace_id
-
-    def sending_headers(self):
-        return {
-            TRACE_HEADER: self.trace_id,
-        }
-
-
-class TracingServerMiddlewareFactory(flight.ServerMiddlewareFactory):
-    def start_call(self, info, headers):
-        print("Starting new call:", info)
-        if TRACE_HEADER in headers:
-            trace_id = headers[TRACE_HEADER][0]
-            print("Found trace header with value:", trace_id)
-            TraceContext.set_trace_id(trace_id)
-        return TracingServerMiddleware(TraceContext.current_trace_id())
-
-
-class TracingClientMiddleware(flight.ClientMiddleware):
-    def sending_headers(self):
-        print("Sending trace ID:", TraceContext.current_trace_id())
-        return {
-            "x-tracing-id": TraceContext.current_trace_id(),
-        }
-
-    def received_headers(self, headers):
-        if TRACE_HEADER in headers:
-            trace_id = headers[TRACE_HEADER][0]
-            print("Found trace header with value:", trace_id)
-            # Don't overwrite our trace ID
-
-
-class TracingClientMiddlewareFactory(flight.ClientMiddlewareFactory):
-    def start_call(self, info):
-        print("Starting new call:", info)
-        return TracingClientMiddleware()
-
-
-class FlightServer(flight.FlightServerBase):
-    def __init__(self, delegate, **kwargs):
-        super().__init__(**kwargs)
-        if delegate:
-            self.delegate = flight.connect(
-                delegate,
-                middleware=(TracingClientMiddlewareFactory(),))
-        else:
-            self.delegate = None
-
-    def list_actions(self, context):
-        return [
-            ("get-trace-id", "Get the trace context ID."),
-        ]
-
-    def do_action(self, context, action):
-        trace_middleware = context.get_middleware("trace")
-        if trace_middleware:
-            TraceContext.set_trace_id(trace_middleware.trace_id)
-        if action.type == "get-trace-id":
-            if self.delegate:
-                for result in self.delegate.do_action(action):
-                    yield result
-            else:
-                trace_id = TraceContext.current_trace_id().encode("utf-8")
-                print("Returning trace ID:", trace_id)
-                buf = pa.py_buffer(trace_id)
-                yield pa.flight.Result(buf)
-        else:
-            raise KeyError(f"Unknown action {action.type!r}")
-
-
-def main():
-    parser = argparse.ArgumentParser()
-
-    subparsers = parser.add_subparsers(dest="command")
-    client = subparsers.add_parser("client", help="Run the client.")
-    client.add_argument("server")
-    client.add_argument("--request-id", default=None)
-
-    server = subparsers.add_parser("server", help="Run the server.")
-    server.add_argument(
-        "--listen",
-        required=True,
-        help="The location to listen on (example: grpc://localhost:5050)",
-    )
-    server.add_argument(
-        "--delegate",
-        required=False,
-        default=None,
-        help=("A location to delegate to. That is, this server will "
-              "simply call the given server for the response. Demonstrates "
-              "propagation of the trace ID between servers."),
-    )
-
-    args = parser.parse_args()
-    if not getattr(args, "command"):
-        parser.print_help()
-        return 1
-
-    if args.command == "server":
-        server = FlightServer(
-            args.delegate,
-            location=args.listen,
-            middleware={"trace": TracingServerMiddlewareFactory()})
-        server.serve()
-    elif args.command == "client":
-        client = flight.connect(
-            args.server,
-            middleware=(TracingClientMiddlewareFactory(),))
-        if args.request_id:
-            TraceContext.set_trace_id(args.request_id)
-        else:
-            TraceContext.set_trace_id("client-chosen-id")
-
-        for result in client.do_action(flight.Action("get-trace-id", b"")):
-            print(result.body.to_pybytes())
-
-
-if __name__ == "__main__":
-    sys.exit(main() or 0)
diff --git a/python/examples/flight/server.py b/python/examples/flight/server.py
deleted file mode 100644
index 7a6b669..0000000
--- a/python/examples/flight/server.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-"""An example Flight Python server."""
-
-import argparse
-import ast
-import threading
-import time
-
-import pyarrow
-import pyarrow.flight
-
-
-class FlightServer(pyarrow.flight.FlightServerBase):
-    def __init__(self, host="localhost", location=None,
-                 tls_certificates=None, verify_client=False,
-                 root_certificates=None, auth_handler=None):
-        super(FlightServer, self).__init__(
-            location, auth_handler, tls_certificates, verify_client,
-            root_certificates)
-        self.flights = {}
-        self.host = host
-        self.tls_certificates = tls_certificates
-
-    @classmethod
-    def descriptor_to_key(cls, descriptor):
-        return (descriptor.descriptor_type.value, descriptor.command,
-                tuple(descriptor.path or tuple()))
-
-    def _make_flight_info(self, key, descriptor, table):
-        if self.tls_certificates:
-            location = pyarrow.flight.Location.for_grpc_tls(
-                self.host, self.port)
-        else:
-            location = pyarrow.flight.Location.for_grpc_tcp(
-                self.host, self.port)
-        endpoints = [pyarrow.flight.FlightEndpoint(repr(key), [location]), ]
-
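-        # Write the table to a throwaway in-memory sink purely to compute the
-        # serialized size reported in FlightInfo.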
-        mock_sink = pyarrow.MockOutputStream()
-        stream_writer = pyarrow.RecordBatchStreamWriter(
-            mock_sink, table.schema)
-        stream_writer.write_table(table)
-        stream_writer.close()
-        data_size = mock_sink.size()
-
-        return pyarrow.flight.FlightInfo(table.schema,
-                                         descriptor, endpoints,
-                                         table.num_rows, data_size)
-
-    def list_flights(self, context, criteria):
-        for key, table in self.flights.items():
-            if key[1] is not None:
-                descriptor = \
-                    pyarrow.flight.FlightDescriptor.for_command(key[1])
-            else:
-                descriptor = pyarrow.flight.FlightDescriptor.for_path(*key[2])
-
-            yield self._make_flight_info(key, descriptor, table)
-
-    def get_flight_info(self, context, descriptor):
-        key = FlightServer.descriptor_to_key(descriptor)
-        if key in self.flights:
-            table = self.flights[key]
-            return self._make_flight_info(key, descriptor, table)
-        raise KeyError('Flight not found.')
-
-    def do_put(self, context, descriptor, reader, writer):
-        key = FlightServer.descriptor_to_key(descriptor)
-        print(key)
-        self.flights[key] = reader.read_all()
-        print(self.flights[key])
-
-    def do_get(self, context, ticket):
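-        # The ticket body is the repr() of a flight key (see
-        # _make_flight_info), so parse it back into a tuple.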
-        key = ast.literal_eval(ticket.ticket.decode())
-        if key not in self.flights:
-            return None
-        return pyarrow.flight.RecordBatchStream(self.flights[key])
-
-    def list_actions(self, context):
-        return [
-            ("clear", "Clear the stored flights."),
-            ("shutdown", "Shut down this server."),
-        ]
-
-    def do_action(self, context, action):
-        if action.type == "clear":
-            raise NotImplementedError(
-                "{} is not implemented.".format(action.type))
-        elif action.type == "healthcheck":
-            pass
-        elif action.type == "shutdown":
-            yield pyarrow.flight.Result(pyarrow.py_buffer(b'Shutdown!'))
-            # Shut down on background thread to avoid blocking current
-            # request
-            threading.Thread(target=self._shutdown).start()
-        else:
-            raise KeyError("Unknown action {!r}".format(action.type))
-
-    def _shutdown(self):
-        """Shut down after a delay."""
-        print("Server is shutting down...")
-        time.sleep(2)
-        self.shutdown()
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument("--host", type=str, default="localhost",
-                        help="Address or hostname to listen on")
-    parser.add_argument("--port", type=int, default=5005,
-                        help="Port number to listen on")
-    parser.add_argument("--tls", nargs=2, default=None,
-                        metavar=('CERTFILE', 'KEYFILE'),
-                        help="Enable transport-level security")
-    parser.add_argument("--verify_client", type=bool, default=False,
-                        help="enable mutual TLS and verify the client if True")
-
-    args = parser.parse_args()
-    tls_certificates = []
-    scheme = "grpc+tcp"
-    if args.tls:
-        scheme = "grpc+tls"
-        with open(args.tls[0], "rb") as cert_file:
-            tls_cert_chain = cert_file.read()
-        with open(args.tls[1], "rb") as key_file:
-            tls_private_key = key_file.read()
-        tls_certificates.append((tls_cert_chain, tls_private_key))
-
-    location = "{}://{}:{}".format(scheme, args.host, args.port)
-
-    server = FlightServer(args.host, location,
-                          tls_certificates=tls_certificates,
-                          verify_client=args.verify_client)
-    print("Serving on", location)
-    server.serve()
-
-
-if __name__ == '__main__':
-    main()
diff --git a/python/examples/minimal_build/Dockerfile.fedora b/python/examples/minimal_build/Dockerfile.fedora
deleted file mode 100644
index 7dc3291..0000000
--- a/python/examples/minimal_build/Dockerfile.fedora
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM fedora:31
-
-RUN dnf update -y && \
-    dnf install -y \
-        autoconf \
-        gcc \
-        gcc-c++ \
-        git \
-        wget \
-        make \
-        cmake \
-        ninja-build \
-        python3-devel \
-        python3-virtualenv
\ No newline at end of file
diff --git a/python/examples/minimal_build/Dockerfile.ubuntu b/python/examples/minimal_build/Dockerfile.ubuntu
deleted file mode 100644
index d7b8408..0000000
--- a/python/examples/minimal_build/Dockerfile.ubuntu
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM ubuntu:bionic
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN apt-get update -y -q && \
-    apt-get install -y -q --no-install-recommends \
-        apt-transport-https \
-        software-properties-common \
-        wget && \
-    apt-get install -y -q --no-install-recommends \
-      build-essential \
-      cmake \
-      git \
-      ninja-build \
-      python3-dev \
-      python3-pip && \
-      apt-get clean && rm -rf /var/lib/apt/lists*
-
-RUN pip3 install -U setuptools && \
-    pip3 install wheel virtualenv
\ No newline at end of file
diff --git a/python/examples/minimal_build/README.md b/python/examples/minimal_build/README.md
deleted file mode 100644
index 9803e18..0000000
--- a/python/examples/minimal_build/README.md
+++ /dev/null
@@ -1,73 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Minimal Python source build on Linux
-
-This directory shows how to bootstrap a local build from source on Linux with
-an eye toward maximum portability across different Linux distributions. This
-may help contributors debug build issues caused by their local
-environments.
-
-## Fedora 31
-
-First, build the Docker image using:
-```
-docker build -t arrow_fedora_minimal -f Dockerfile.fedora .
-```
-
-Then build PyArrow with pip/virtualenv or conda:
-```
-# With pip/virtualenv
-docker run --rm -t -i -v $PWD:/io arrow_fedora_minimal /io/build_venv.sh
-
-# With conda
-docker run --rm -t -i -v $PWD:/io arrow_fedora_minimal /io/build_conda.sh
-```
-
-## Ubuntu 18.04
-
-First, build the Docker image using:
-```
-docker build -t arrow_ubuntu_minimal -f Dockerfile.ubuntu .
-```
-
-Then build PyArrow with pip/virtualenv or conda:
-```
-# With pip/virtualenv
-docker run --rm -t -i -v $PWD:/io arrow_ubuntu_minimal /io/build_venv.sh
-
-# With conda
-docker run --rm -t -i -v $PWD:/io arrow_ubuntu_minimal /io/build_conda.sh
-```
-
-## Building on Fedora - Podman and SELinux
-
-In addition to using Podman instead of Docker, you need to specify `:Z`
-for SELinux relabelling when binding a volume.
-
-First, build the image using:
-```
-podman build -t arrow_fedora_minimal -f Dockerfile.fedora .
-```
-
-Then build PyArrow with pip/virtualenv:
-```
-# With pip/virtualenv
-podman run --rm -i -v $PWD:/io:Z -t arrow_fedora_minimal /io/build_venv.sh
-```
diff --git a/python/examples/minimal_build/build_conda.sh b/python/examples/minimal_build/build_conda.sh
deleted file mode 100755
index 6f93ebd..0000000
--- a/python/examples/minimal_build/build_conda.sh
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -e
-
-#----------------------------------------------------------------------
-# Change this to whatever makes sense for your system
-
-WORKDIR=${WORKDIR:-$HOME}
-MINICONDA=$WORKDIR/miniconda-for-arrow
-LIBRARY_INSTALL_DIR=$WORKDIR/local-libs
-CPP_BUILD_DIR=$WORKDIR/arrow-cpp-build
-ARROW_ROOT=/arrow
-PYTHON=3.7
-
-git clone https://github.com/apache/arrow.git $ARROW_ROOT
-
-#----------------------------------------------------------------------
-# Run these only once
-
-function setup_miniconda() {
-  MINICONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"
-  wget -O miniconda.sh $MINICONDA_URL
-  bash miniconda.sh -b -p $MINICONDA
-  rm -f miniconda.sh
-  LOCAL_PATH=$PATH
-  export PATH="$MINICONDA/bin:$PATH"
-
-  conda update -y -q conda
-  conda config --set auto_update_conda false
-  conda info -a
-
-  conda config --set show_channel_urls True
-  conda config --add channels https://repo.continuum.io/pkgs/free
-  conda config --add channels conda-forge
-
-  conda create -y -n pyarrow-$PYTHON -c conda-forge \
-        --file $ARROW_ROOT/ci/conda_env_unix.yml \
-        --file $ARROW_ROOT/ci/conda_env_cpp.yml \
-        --file $ARROW_ROOT/ci/conda_env_python.yml \
-        compilers \
-        python=$PYTHON \
-        pandas
-
-  export PATH=$LOCAL_PATH
-}
-
-setup_miniconda
-
-#----------------------------------------------------------------------
-# Activate conda in bash and activate conda environment
-
-. $MINICONDA/etc/profile.d/conda.sh
-conda activate pyarrow-$PYTHON
-export ARROW_HOME=$CONDA_PREFIX
-
-#----------------------------------------------------------------------
-# Build C++ library
-
-mkdir -p $CPP_BUILD_DIR
-pushd $CPP_BUILD_DIR
-
-cmake -GNinja \
-      -DCMAKE_BUILD_TYPE=DEBUG \
-      -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \
-      -DCMAKE_INSTALL_LIBDIR=lib \
-      -DARROW_FLIGHT=ON \
-      -DARROW_WITH_BZ2=ON \
-      -DARROW_WITH_ZLIB=ON \
-      -DARROW_WITH_ZSTD=ON \
-      -DARROW_WITH_LZ4=ON \
-      -DARROW_WITH_SNAPPY=ON \
-      -DARROW_WITH_BROTLI=ON \
-      -DARROW_PARQUET=ON \
-      -DARROW_PLASMA=ON \
-      -DARROW_PYTHON=ON \
-      $ARROW_ROOT/cpp
-
-ninja install
-
-popd
-
-#----------------------------------------------------------------------
-# Build and test Python library
-pushd $ARROW_ROOT/python
-
-rm -rf build/  # remove any pesky pre-existing build directory
-
-export PYARROW_BUILD_TYPE=Debug
-export PYARROW_CMAKE_GENERATOR=Ninja
-export PYARROW_WITH_FLIGHT=1
-export PYARROW_WITH_PARQUET=1
-
-# You can run either "develop" or "build_ext --inplace". Your pick
-
-# python setup.py build_ext --inplace
-python setup.py develop
-
-# git submodules are required for unit tests
-git submodule update --init
-export PARQUET_TEST_DATA="$ARROW_ROOT/cpp/submodules/parquet-testing/data"
-export ARROW_TEST_DATA="$ARROW_ROOT/testing/data"
-
-py.test pyarrow
diff --git a/python/examples/minimal_build/build_venv.sh b/python/examples/minimal_build/build_venv.sh
deleted file mode 100755
index afa4206..0000000
--- a/python/examples/minimal_build/build_venv.sh
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -e
-
-#----------------------------------------------------------------------
-# Change this to whatever makes sense for your system
-
-WORKDIR=${WORKDIR:-$HOME}
-MINICONDA=$WORKDIR/miniconda-for-arrow
-LIBRARY_INSTALL_DIR=$WORKDIR/local-libs
-CPP_BUILD_DIR=$WORKDIR/arrow-cpp-build
-ARROW_ROOT=$WORKDIR/arrow
-export ARROW_HOME=$WORKDIR/dist
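-# Make the freshly installed libarrow visible at runtime for the tests below.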
-export LD_LIBRARY_PATH=$ARROW_HOME/lib:$LD_LIBRARY_PATH
-
-virtualenv $WORKDIR/venv
-source $WORKDIR/venv/bin/activate
-
-git clone https://github.com/apache/arrow.git $ARROW_ROOT
-
-pip install -r $ARROW_ROOT/python/requirements-build.txt \
-     -r $ARROW_ROOT/python/requirements-test.txt
-
-#----------------------------------------------------------------------
-# Build C++ library
-
-mkdir -p $CPP_BUILD_DIR
-pushd $CPP_BUILD_DIR
-
-cmake -GNinja \
-      -DCMAKE_BUILD_TYPE=DEBUG \
-      -DCMAKE_INSTALL_PREFIX=$ARROW_HOME \
-      -DCMAKE_INSTALL_LIBDIR=lib \
-      -DARROW_WITH_BZ2=ON \
-      -DARROW_WITH_ZLIB=ON \
-      -DARROW_WITH_ZSTD=ON \
-      -DARROW_WITH_LZ4=ON \
-      -DARROW_WITH_SNAPPY=ON \
-      -DARROW_WITH_BROTLI=ON \
-      -DARROW_PARQUET=ON \
-      -DARROW_PYTHON=ON \
-      $ARROW_ROOT/cpp
-
-ninja install
-
-popd
-
-#----------------------------------------------------------------------
-# Build and test Python library
-pushd $ARROW_ROOT/python
-
-rm -rf build/  # remove any pesky pre-existing build directory
-
-export PYARROW_BUILD_TYPE=Debug
-export PYARROW_CMAKE_GENERATOR=Ninja
-export PYARROW_WITH_PARQUET=1
-
-# You can run either "develop" or "build_ext --inplace". Your pick
-
-# python setup.py build_ext --inplace
-python setup.py develop
-
-# git submodules are required for unit tests
-git submodule update --init
-export PARQUET_TEST_DATA="$ARROW_ROOT/cpp/submodules/parquet-testing/data"
-export ARROW_TEST_DATA="$ARROW_ROOT/testing/data"
-
-py.test pyarrow
diff --git a/python/examples/plasma/sorting/multimerge.pyx b/python/examples/plasma/sorting/multimerge.pyx
deleted file mode 100644
index 5e77fdf..0000000
--- a/python/examples/plasma/sorting/multimerge.pyx
+++ /dev/null
@@ -1,102 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: profile=False
-# distutils: language = c++
-# cython: embedsignature = True
-
-from libc.stdint cimport uintptr_t
-from libcpp.vector cimport vector
-from libcpp.pair cimport pair
-
-import numpy as np
-
-cimport numpy as np
-
-cdef extern from "<queue>" namespace "std" nogil:
-    cdef cppclass priority_queue[T]:
-        priority_queue() except +
-        priority_queue(priority_queue&) except +
-        bint empty()
-        void pop()
-        void push(T&)
-        size_t size()
-        T& top()
-
-
-def multimerge2d(*arrays):
-    """Merge a list of sorted 2d arrays into a sorted 2d array.
-
-    This assumes C style ordering for both input and output arrays. For
-    each input array we have array[i,0] <= array[i+1,0] and for the output
-    array the same will hold.
-
-    Ideally this code would be simpler and also support both C style
-    and Fortran style ordering.
-    """
-    cdef int num_arrays = len(arrays)
-    assert num_arrays > 0
-
-    cdef int num_cols = arrays[0].shape[1]
-
-    for i in range(num_arrays):
-        assert arrays[i].ndim == 2
-        assert arrays[i].dtype == np.float64
-        assert arrays[i].shape[1] == num_cols
-        assert not np.isfortran(arrays[i])
-
-    cdef vector[double*] data
-
-    # The indices vector keeps track of the index of the next row to process in
-    # each array.
-    cdef vector[int] indices = num_arrays * [0]
-
-    # The sizes vector stores the total number of elements that each array has.
-    cdef vector[int] sizes
-
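-    # std::priority_queue is a max-heap, so keys are pushed negated below to
-    # always pop the row with the smallest sort value first.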
-    cdef priority_queue[pair[double, int]] queue
-    cdef pair[double, int] top
-    cdef int num_rows = sum([array.shape[0] for array in arrays])
-    cdef np.ndarray[np.float64_t, ndim=2] result = np.zeros(
-        (num_rows, num_cols), dtype=np.float64)
-    cdef double* result_ptr = <double*> np.PyArray_DATA(result)
-    for i in range(num_arrays):
-        if arrays[i].size > 0:
-            sizes.push_back(arrays[i].size)
-            data.push_back(<double*> np.PyArray_DATA(arrays[i]))
-            # Index queue entries by position in `data`, not `i`, so that
-            # empty input arrays cannot misalign the bookkeeping vectors.
-            queue.push(pair[double, int](-data.back()[0],
-                                         <int>data.size() - 1))
-
-    cdef int curr_idx = 0
-    cdef int j
-    cdef int col = 0
-
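-    # Repeatedly pop the globally smallest row, copy it into the output, then
-    # push the source array's next row (if any) back onto the queue.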
-    for j in range(num_rows):
-        top = queue.top()
-        for col in range(num_cols):
-            result_ptr[curr_idx + col] = (
-                data[top.second][indices[top.second] + col])
-
-        indices[top.second] += num_cols
-        curr_idx += num_cols
-
-        queue.pop()
-        if indices[top.second] < sizes[top.second]:
-            queue.push(
-                pair[double, int](-data[top.second][indices[top.second]],
-                                  top.second))
-
-    return result
diff --git a/python/examples/plasma/sorting/setup.py b/python/examples/plasma/sorting/setup.py
deleted file mode 100644
index a5dfa5a..0000000
--- a/python/examples/plasma/sorting/setup.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import numpy as np
-from setuptools import setup, Extension
-from Cython.Build import cythonize
-
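-# Build the extension with "python setup.py install" (as noted in sort_df.py)
-# or "python setup.py build_ext --inplace".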
-setup(
-    name="multimerge",
-    # extra_compile_args is an Extension option, not a setup() option, so it
-    # must be attached to the Extension to actually reach the compiler.
-    ext_modules=cythonize(Extension(
-        "multimerge",
-        sources=["multimerge.pyx"],
-        extra_compile_args=["-O3", "-mtune=native", "-march=native"],
-    )),
-    include_dirs=[np.get_include()],
-)
diff --git a/python/examples/plasma/sorting/sort_df.py b/python/examples/plasma/sorting/sort_df.py
deleted file mode 100644
index 2a51759..0000000
--- a/python/examples/plasma/sorting/sort_df.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from multiprocessing import Pool
-import numpy as np
-import pandas as pd
-import pyarrow as pa
-import pyarrow.plasma as plasma
-import subprocess
-import time
-
-import multimerge
-
-# To run this example, you will first need to run "python setup.py install" in
-# this directory to build the Cython module.
-#
-# You will only see speedups if you run this code on more data; this is just
-# a small example that can run on a laptop.
-#
-# The values we used to get a speedup (on a m4.10xlarge instance on EC2) were
-#     object_store_size = 84 * 10 ** 9
-#     num_cores = 20
-#     num_rows = 10 ** 9
-#     num_cols = 1
-
-client = None
-object_store_size = 2 * 10 ** 9  # 2 GB
-num_cores = 8
-num_rows = 200000
-num_cols = 2
-column_names = [str(i) for i in range(num_cols)]
-column_to_sort = column_names[0]
-
-
-# Connect to clients
-def connect():
-    global client
-    client = plasma.connect('/tmp/store')
-    np.random.seed(int(time.time() * 10e7) % 10000000)
-
-
-def put_df(df):
-    record_batch = pa.RecordBatch.from_pandas(df)
-
-    # Get size of record batch and schema
-    mock_sink = pa.MockOutputStream()
-    stream_writer = pa.RecordBatchStreamWriter(mock_sink, record_batch.schema)
-    stream_writer.write_batch(record_batch)
-    data_size = mock_sink.size()
-
-    # Generate an ID and allocate a buffer in the object store for the
-    # serialized DataFrame
-    object_id = plasma.ObjectID(np.random.bytes(20))
-    buf = client.create(object_id, data_size)
-
-    # Write the serialized DataFrame to the object store
-    sink = pa.FixedSizeBufferWriter(buf)
-    stream_writer = pa.RecordBatchStreamWriter(sink, record_batch.schema)
-    stream_writer.write_batch(record_batch)
-
-    # Seal the object
-    client.seal(object_id)
-
-    return object_id
-
-
-def get_dfs(object_ids):
-    """Retrieve dataframes from the object store given their object IDs."""
-    buffers = client.get_buffers(object_ids)
-    return [pa.RecordBatchStreamReader(buf).read_next_batch().to_pandas()
-            for buf in buffers]
-
-
-def local_sort(object_id):
-    """Sort a partition of a dataframe."""
-    # Get the dataframe from the object store.
-    [df] = get_dfs([object_id])
-    # Sort the dataframe.
-    sorted_df = df.sort_values(by=column_to_sort)
-    # Get evenly spaced values from the sort column to use as pivot samples.
-    indices = np.linspace(0, len(df) - 1, num=num_cores, dtype=np.int64)
-    # Put the sorted dataframe in the object store and return the corresponding
-    # object ID as well as the sampled values.
-    return (put_df(sorted_df),
-            sorted_df[column_to_sort].to_numpy().take(indices))
-
-
-def local_partitions(object_id_and_pivots):
-    """Take a sorted partition of a dataframe and split it into more pieces."""
-    object_id, pivots = object_id_and_pivots
-    [df] = get_dfs([object_id])
-    split_at = df[column_to_sort].searchsorted(pivots)
-    split_at = [0] + list(split_at) + [len(df)]
-    # Partition the sorted dataframe and put each partition into the object
-    # store.
-    return [put_df(df[i:j]) for i, j in zip(split_at[:-1], split_at[1:])]
-
-
-def merge(object_ids):
-    """Merge a number of sorted dataframes into a single sorted dataframe."""
-    dfs = get_dfs(object_ids)
-
-    # In order to use our multimerge code, we have to convert the arrays from
-    # the Fortran format to the C format.
-    arrays = [np.ascontiguousarray(df.to_numpy()) for df in dfs]
-    for a in arrays:
-        assert a.dtype == np.float64
-        assert not np.isfortran(a)
-
-    # Filter out empty arrays.
-    arrays = [a for a in arrays if a.shape[0] > 0]
-
-    if len(arrays) == 0:
-        return None
-
-    resulting_array = multimerge.multimerge2d(*arrays)
-    merged_df2 = pd.DataFrame(resulting_array, columns=column_names)
-
-    return put_df(merged_df2)
-
-
-if __name__ == '__main__':
-    # Start the plasma store.
-    p = subprocess.Popen(['plasma_store',
-                          '-s', '/tmp/store',
-                          '-m', str(object_store_size)])
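-    # Note: a more robust script would wait for the store socket at
-    # /tmp/store to appear before connecting.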
-
-    # Connect to the plasma store.
-    connect()
-
-    # Connect the processes in the pool.
-    pool = Pool(initializer=connect, initargs=(), processes=num_cores)
-
-    # Create a DataFrame from a numpy array.
-    df = pd.DataFrame(np.random.randn(num_rows, num_cols),
-                      columns=column_names)
-
-    partition_ids = [put_df(partition) for partition
-                     in np.split(df, num_cores)]
-
-    # Begin timing the parallel sort example.
-    parallel_sort_start = time.time()
-
-    # Sort each partition and subsample them. The subsampled values will be
-    # used to create buckets.
-    sorted_df_ids, pivot_groups = list(zip(*pool.map(local_sort,
-                                                     partition_ids)))
-
-    # Choose the pivots.
-    all_pivots = np.concatenate(pivot_groups)
-    indices = np.linspace(0, len(all_pivots) - 1, num=num_cores,
-                          dtype=np.int64)
-    pivots = np.take(np.sort(all_pivots), indices)
-
-    # Break all of the sorted partitions into even smaller partitions. Group
-    # the object IDs from each bucket together.
-    results = list(zip(*pool.map(local_partitions,
-                                 zip(sorted_df_ids,
-                                     len(sorted_df_ids) * [pivots]))))
-
-    # Merge each of the buckets and store the results in the object store.
-    object_ids = pool.map(merge, results)
-
-    resulting_ids = [object_id for object_id in object_ids
-                     if object_id is not None]
-
-    # Stop timing the parallel sort example.
-    parallel_sort_end = time.time()
-
-    print('Parallel sort took {} seconds.'
-          .format(parallel_sort_end - parallel_sort_start))
-
-    serial_sort_start = time.time()
-
-    original_sorted_df = df.sort_values(by=column_to_sort)
-
-    serial_sort_end = time.time()
-
-    # Check that we sorted the DataFrame properly.
-
-    sorted_dfs = get_dfs(resulting_ids)
-    sorted_df = pd.concat(sorted_dfs)
-
-    print('Serial sort took {} seconds.'
-          .format(serial_sort_end - serial_sort_start))
-
-    assert np.allclose(sorted_df.values, original_sorted_df.values)
-
-    # Kill the object store.
-    p.kill()
diff --git a/python/pyarrow/__init__.pxd b/python/pyarrow/__init__.pxd
deleted file mode 100644
index 8cc54b4..0000000
--- a/python/pyarrow/__init__.pxd
+++ /dev/null
@@ -1,42 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from libcpp.memory cimport shared_ptr
-from pyarrow.includes.libarrow cimport (CArray, CBuffer, CDataType,
-                                        CField, CRecordBatch, CSchema,
-                                        CTable, CTensor, CSparseCOOTensor,
-                                        CSparseCSRMatrix, CSparseCSCMatrix,
-                                        CSparseCSFTensor)
-
-cdef extern from "arrow/python/pyarrow.h" namespace "arrow::py":
-    cdef int import_pyarrow() except -1
-    cdef object wrap_buffer(const shared_ptr[CBuffer]& buffer)
-    cdef object wrap_data_type(const shared_ptr[CDataType]& type)
-    cdef object wrap_field(const shared_ptr[CField]& field)
-    cdef object wrap_schema(const shared_ptr[CSchema]& schema)
-    cdef object wrap_array(const shared_ptr[CArray]& sp_array)
-    cdef object wrap_tensor(const shared_ptr[CTensor]& sp_tensor)
-    cdef object wrap_sparse_tensor_coo(
-        const shared_ptr[CSparseCOOTensor]& sp_sparse_tensor)
-    cdef object wrap_sparse_tensor_csr(
-        const shared_ptr[CSparseCSRMatrix]& sp_sparse_tensor)
-    cdef object wrap_sparse_tensor_csc(
-        const shared_ptr[CSparseCSCMatrix]& sp_sparse_tensor)
-    cdef object wrap_sparse_tensor_csf(
-        const shared_ptr[CSparseCSFTensor]& sp_sparse_tensor)
-    cdef object wrap_table(const shared_ptr[CTable]& ctable)
-    cdef object wrap_batch(const shared_ptr[CRecordBatch]& cbatch)
diff --git a/python/pyarrow/__init__.py b/python/pyarrow/__init__.py
deleted file mode 100644
index adfd69c..0000000
--- a/python/pyarrow/__init__.py
+++ /dev/null
@@ -1,504 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# flake8: noqa
-
-"""
-PyArrow is the python implementation of Apache Arrow.
-
-Apache Arrow is a cross-language development platform for in-memory data.
-It specifies a standardized language-independent columnar memory format for
-flat and hierarchical data, organized for efficient analytic operations on
-modern hardware. It also provides computational libraries and zero-copy
-streaming messaging and interprocess communication.
-
-For more information see the official page at https://arrow.apache.org
-"""
-
-import gc as _gc
-import os as _os
-import sys as _sys
-import warnings as _warnings
-
-try:
-    from ._generated_version import version as __version__
-except ImportError:
-    # Package is not installed, parse git tag at runtime
-    try:
-        import setuptools_scm
-        # Code duplicated from setup.py to avoid a dependency on each other
-
-        def parse_git(root, **kwargs):
-            """
-            Parse function for setuptools_scm that ignores tags for non-C++
-            subprojects, e.g. apache-arrow-js-XXX tags.
-            """
-            from setuptools_scm.git import parse
-            kwargs['describe_command'] = \
-                "git describe --dirty --tags --long --match 'apache-arrow-[0-9].*'"
-            return parse(root, **kwargs)
-        __version__ = setuptools_scm.get_version('../',
-                                                 parse=parse_git)
-    except ImportError:
-        __version__ = None
-
-# ARROW-8684: Disable GC while initializing Cython extension module,
-# to workaround Cython bug in https://github.com/cython/cython/issues/3603
-_gc_enabled = _gc.isenabled()
-_gc.disable()
-import pyarrow.lib as _lib
-if _gc_enabled:
-    _gc.enable()
-
-from pyarrow.lib import (BuildInfo, RuntimeInfo, VersionInfo,
-                         cpp_build_info, cpp_version, cpp_version_info,
-                         runtime_info, cpu_count, set_cpu_count,
-                         enable_signal_handlers)
-
-
-def show_versions():
-    """
-    Print various version information, to help with error reporting.
-    """
-    # TODO: CPU information and flags
-    print("pyarrow version info\n--------------------")
-    print("Package kind: {}".format(cpp_build_info.package_kind
-                                    if len(cpp_build_info.package_kind) > 0
-                                    else "not indicated"))
-    print("Arrow C++ library version: {0}".format(cpp_build_info.version))
-    print("Arrow C++ compiler: {0} {1}"
-          .format(cpp_build_info.compiler_id, cpp_build_info.compiler_version))
-    print("Arrow C++ compiler flags: {0}"
-          .format(cpp_build_info.compiler_flags))
-    print("Arrow C++ git revision: {0}".format(cpp_build_info.git_id))
-    print("Arrow C++ git description: {0}"
-          .format(cpp_build_info.git_description))
-
-
-from pyarrow.lib import (null, bool_,
-                         int8, int16, int32, int64,
-                         uint8, uint16, uint32, uint64,
-                         time32, time64, timestamp, date32, date64, duration,
-                         float16, float32, float64,
-                         binary, string, utf8,
-                         large_binary, large_string, large_utf8,
-                         decimal128, decimal256,
-                         list_, large_list, map_, struct,
-                         union, sparse_union, dense_union,
-                         dictionary,
-                         field,
-                         type_for_alias,
-                         DataType, DictionaryType, StructType,
-                         ListType, LargeListType, MapType, FixedSizeListType,
-                         UnionType, SparseUnionType, DenseUnionType,
-                         TimestampType, Time32Type, Time64Type, DurationType,
-                         FixedSizeBinaryType, Decimal128Type, Decimal256Type,
-                         BaseExtensionType, ExtensionType,
-                         PyExtensionType, UnknownExtensionType,
-                         register_extension_type, unregister_extension_type,
-                         DictionaryMemo,
-                         KeyValueMetadata,
-                         Field,
-                         Schema,
-                         schema,
-                         unify_schemas,
-                         Array, Tensor,
-                         array, chunked_array, record_batch, nulls, repeat,
-                         SparseCOOTensor, SparseCSRMatrix, SparseCSCMatrix,
-                         SparseCSFTensor,
-                         infer_type, from_numpy_dtype,
-                         NullArray,
-                         NumericArray, IntegerArray, FloatingPointArray,
-                         BooleanArray,
-                         Int8Array, UInt8Array,
-                         Int16Array, UInt16Array,
-                         Int32Array, UInt32Array,
-                         Int64Array, UInt64Array,
-                         ListArray, LargeListArray, MapArray,
-                         FixedSizeListArray, UnionArray,
-                         BinaryArray, StringArray,
-                         LargeBinaryArray, LargeStringArray,
-                         FixedSizeBinaryArray,
-                         DictionaryArray,
-                         Date32Array, Date64Array, TimestampArray,
-                         Time32Array, Time64Array, DurationArray,
-                         Decimal128Array, Decimal256Array, StructArray, ExtensionArray,
-                         scalar, NA, _NULL as NULL, Scalar,
-                         NullScalar, BooleanScalar,
-                         Int8Scalar, Int16Scalar, Int32Scalar, Int64Scalar,
-                         UInt8Scalar, UInt16Scalar, UInt32Scalar, UInt64Scalar,
-                         HalfFloatScalar, FloatScalar, DoubleScalar,
-                         Decimal128Scalar, Decimal256Scalar,
-                         ListScalar, LargeListScalar, FixedSizeListScalar,
-                         Date32Scalar, Date64Scalar,
-                         Time32Scalar, Time64Scalar,
-                         BinaryScalar, LargeBinaryScalar,
-                         StringScalar, LargeStringScalar,
-                         FixedSizeBinaryScalar, DictionaryScalar,
-                         MapScalar, UnionScalar, StructScalar,
-                         TimestampScalar, DurationScalar)
-
-# Buffers, allocation
-from pyarrow.lib import (Buffer, ResizableBuffer, foreign_buffer, py_buffer,
-                         Codec, compress, decompress, allocate_buffer)
-
-from pyarrow.lib import (MemoryPool, LoggingMemoryPool, ProxyMemoryPool,
-                         total_allocated_bytes, set_memory_pool,
-                         default_memory_pool, system_memory_pool,
-                         jemalloc_memory_pool, mimalloc_memory_pool,
-                         logging_memory_pool, proxy_memory_pool,
-                         log_memory_allocations, jemalloc_set_decay_ms)
-
-# I/O
-from pyarrow.lib import (HdfsFile, NativeFile, PythonFile,
-                         BufferedInputStream, BufferedOutputStream,
-                         CompressedInputStream, CompressedOutputStream,
-                         TransformInputStream, transcoding_input_stream,
-                         FixedSizeBufferWriter,
-                         BufferReader, BufferOutputStream,
-                         OSFile, MemoryMappedFile, memory_map,
-                         create_memory_map, have_libhdfs,
-                         MockOutputStream, input_stream, output_stream)
-
-from pyarrow.lib import (ChunkedArray, RecordBatch, Table, table,
-                         concat_arrays, concat_tables)
-
-# Exceptions
-from pyarrow.lib import (ArrowCancelled,
-                         ArrowCapacityError,
-                         ArrowException,
-                         ArrowKeyError,
-                         ArrowIndexError,
-                         ArrowInvalid,
-                         ArrowIOError,
-                         ArrowMemoryError,
-                         ArrowNotImplementedError,
-                         ArrowTypeError,
-                         ArrowSerializationError)
-
-# Serialization
-from pyarrow.lib import (deserialize_from, deserialize,
-                         deserialize_components,
-                         serialize, serialize_to, read_serialized,
-                         SerializationCallbackError,
-                         DeserializationCallbackError)
-
-import pyarrow.hdfs as hdfs
-
-from pyarrow.ipc import serialize_pandas, deserialize_pandas
-import pyarrow.ipc as ipc
-
-from pyarrow.serialization import (default_serialization_context,
-                                   register_default_serialization_handlers,
-                                   register_torch_serialization_handlers)
-
-import pyarrow.types as types
-
-
-# deprecated top-level access
-
-
-from pyarrow.filesystem import FileSystem as _FileSystem
-from pyarrow.filesystem import LocalFileSystem as _LocalFileSystem
-from pyarrow.hdfs import HadoopFileSystem as _HadoopFileSystem
-
-from pyarrow.lib import SerializationContext as _SerializationContext
-from pyarrow.lib import SerializedPyObject as _SerializedPyObject
-
-
-_localfs = _LocalFileSystem._get_instance()
-
-
-_msg = (
-    "pyarrow.{0} is deprecated as of 2.0.0, please use pyarrow.fs.{1} instead."
-)
-
-_serialization_msg = (
-    "'pyarrow.{0}' is deprecated and will be removed in a future version. "
-    "Use pickle or the pyarrow IPC functionality instead."
-)
-
-_deprecated = {
-    "localfs": (_localfs, "LocalFileSystem"),
-    "FileSystem": (_FileSystem, "FileSystem"),
-    "LocalFileSystem": (_LocalFileSystem, "LocalFileSystem"),
-    "HadoopFileSystem": (_HadoopFileSystem, "HadoopFileSystem"),
-}
-
-_serialization_deprecated = {
-    "SerializationContext": _SerializationContext,
-    "SerializedPyObject": _SerializedPyObject,
-}
-
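-# Module-level __getattr__ (PEP 562) is only available on Python 3.7+, hence
-# the eager assignments in the fallback branch below.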
-if _sys.version_info >= (3, 7):
-    def __getattr__(name):
-        if name in _deprecated:
-            obj, new_name = _deprecated[name]
-            _warnings.warn(_msg.format(name, new_name),
-                           FutureWarning, stacklevel=2)
-            return obj
-        elif name in _serialization_deprecated:
-            _warnings.warn(_serialization_msg.format(name),
-                           FutureWarning, stacklevel=2)
-            return _serialization_deprecated[name]
-
-        raise AttributeError(
-            "module 'pyarrow' has no attribute '{0}'".format(name)
-        )
-else:
-    localfs = _localfs
-    FileSystem = _FileSystem
-    LocalFileSystem = _LocalFileSystem
-    HadoopFileSystem = _HadoopFileSystem
-    SerializationContext = _SerializationContext
-    SerializedPyObject = _SerializedPyObject
-
-
-# Entry point for starting the plasma store
-
-
-def _plasma_store_entry_point():
-    """Entry point for starting the plasma store.
-
-    This can be used by invoking e.g.
-    ``plasma_store -s /tmp/plasma -m 1000000000``
-    from the command line and will start the plasma_store executable with the
-    given arguments.
-    """
-    import pyarrow
-    plasma_store_executable = _os.path.join(pyarrow.__path__[0],
-                                            "plasma-store-server")
-    _os.execv(plasma_store_executable, _sys.argv)
-
-
-# ----------------------------------------------------------------------
-# Deprecations
-
-from pyarrow.util import _deprecate_api, _deprecate_class
-
-read_message = _deprecate_api("read_message", "ipc.read_message",
-                              ipc.read_message, "0.17.0")
-
-read_record_batch = _deprecate_api("read_record_batch",
-                                   "ipc.read_record_batch",
-                                   ipc.read_record_batch, "0.17.0")
-
-read_schema = _deprecate_api("read_schema", "ipc.read_schema",
-                             ipc.read_schema, "0.17.0")
-
-read_tensor = _deprecate_api("read_tensor", "ipc.read_tensor",
-                             ipc.read_tensor, "0.17.0")
-
-write_tensor = _deprecate_api("write_tensor", "ipc.write_tensor",
-                              ipc.write_tensor, "0.17.0")
-
-get_record_batch_size = _deprecate_api("get_record_batch_size",
-                                       "ipc.get_record_batch_size",
-                                       ipc.get_record_batch_size, "0.17.0")
-
-get_tensor_size = _deprecate_api("get_tensor_size",
-                                 "ipc.get_tensor_size",
-                                 ipc.get_tensor_size, "0.17.0")
-
-open_stream = _deprecate_api("open_stream", "ipc.open_stream",
-                             ipc.open_stream, "0.17.0")
-
-open_file = _deprecate_api("open_file", "ipc.open_file", ipc.open_file,
-                           "0.17.0")
-
-
-def _deprecate_scalar(ty, symbol):
-    return _deprecate_class("{}Value".format(ty), symbol, "1.0.0")
-
-
-ArrayValue = _deprecate_class("ArrayValue", Scalar, "1.0.0")
-NullType = _deprecate_class("NullType", NullScalar, "1.0.0")
-
-BooleanValue = _deprecate_scalar("Boolean", BooleanScalar)
-Int8Value = _deprecate_scalar("Int8", Int8Scalar)
-Int16Value = _deprecate_scalar("Int16", Int16Scalar)
-Int32Value = _deprecate_scalar("Int32", Int32Scalar)
-Int64Value = _deprecate_scalar("Int64", Int64Scalar)
-UInt8Value = _deprecate_scalar("UInt8", UInt8Scalar)
-UInt16Value = _deprecate_scalar("UInt16", UInt16Scalar)
-UInt32Value = _deprecate_scalar("UInt32", UInt32Scalar)
-UInt64Value = _deprecate_scalar("UInt64", UInt64Scalar)
-HalfFloatValue = _deprecate_scalar("HalfFloat", HalfFloatScalar)
-FloatValue = _deprecate_scalar("Float", FloatScalar)
-DoubleValue = _deprecate_scalar("Double", DoubleScalar)
-ListValue = _deprecate_scalar("List", ListScalar)
-LargeListValue = _deprecate_scalar("LargeList", LargeListScalar)
-MapValue = _deprecate_scalar("Map", MapScalar)
-FixedSizeListValue = _deprecate_scalar("FixedSizeList", FixedSizeListScalar)
-BinaryValue = _deprecate_scalar("Binary", BinaryScalar)
-StringValue = _deprecate_scalar("String", StringScalar)
-LargeBinaryValue = _deprecate_scalar("LargeBinary", LargeBinaryScalar)
-LargeStringValue = _deprecate_scalar("LargeString", LargeStringScalar)
-FixedSizeBinaryValue = _deprecate_scalar("FixedSizeBinary",
-                                         FixedSizeBinaryScalar)
-Decimal128Value = _deprecate_scalar("Decimal128", Decimal128Scalar)
-Decimal256Value = _deprecate_scalar("Decimal256", Decimal256Scalar)
-UnionValue = _deprecate_scalar("Union", UnionScalar)
-StructValue = _deprecate_scalar("Struct", StructScalar)
-DictionaryValue = _deprecate_scalar("Dictionary", DictionaryScalar)
-Date32Value = _deprecate_scalar("Date32", Date32Scalar)
-Date64Value = _deprecate_scalar("Date64", Date64Scalar)
-Time32Value = _deprecate_scalar("Time32", Time32Scalar)
-Time64Value = _deprecate_scalar("Time64", Time64Scalar)
-TimestampValue = _deprecate_scalar("Timestamp", TimestampScalar)
-DurationValue = _deprecate_scalar("Duration", DurationScalar)
-
-
-# TODO: Deprecate these somehow in the pyarrow namespace
-from pyarrow.ipc import (Message, MessageReader, MetadataVersion,
-                         RecordBatchFileReader, RecordBatchFileWriter,
-                         RecordBatchStreamReader, RecordBatchStreamWriter)
-
-# ----------------------------------------------------------------------
-# Returning absolute path to the pyarrow include directory (if bundled, e.g. in
-# wheels)
-
-
-def get_include():
-    """
-    Return absolute path to directory containing Arrow C++ include
-    headers. Similar to numpy.get_include
-    """
-    return _os.path.join(_os.path.dirname(__file__), 'include')
-
-
-def _get_pkg_config_executable():
-    return _os.environ.get('PKG_CONFIG', 'pkg-config')
-
-
-def _has_pkg_config(pkgname):
-    import subprocess
-    try:
-        return subprocess.call([_get_pkg_config_executable(),
-                                '--exists', pkgname]) == 0
-    except FileNotFoundError:
-        return False
-
-
-def _read_pkg_config_variable(pkgname, cli_args):
-    import subprocess
-    cmd = [_get_pkg_config_executable(), pkgname] + cli_args
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE)
-    out, err = proc.communicate()
-    if proc.returncode != 0:
-        raise RuntimeError("pkg-config failed: " + err.decode('utf8'))
-    return out.rstrip().decode('utf8')
-
-
-def get_libraries():
-    """
-    Return list of library names to include in the `libraries` argument for C
-    or Cython extensions using pyarrow
-    """
-    return ['arrow', 'arrow_python']
-
-
-def create_library_symlinks():
-    """
-    With Linux and macOS wheels, the bundled shared libraries have an embedded
-    ABI version like libarrow.so.17 or libarrow.17.dylib and so linking to them
-    with -larrow won't work unless we create symlinks at locations like
-    site-packages/pyarrow/libarrow.so. This unfortunate workaround addresses
-    prior problems we had with shipping two copies of the shared libraries to
-    permit third party projects like turbodbc to build their C++ extensions
-    against the pyarrow wheels.
-
-    This function must only be invoked once and only when the shared libraries
-    are bundled with the Python package, which should only apply to wheel-based
-installs. It requires write access to the site-packages/pyarrow directory
-and so, depending on your system, may need to be run as root.
-    """
-    import glob
-    if _sys.platform == 'win32':
-        return
-    package_cwd = _os.path.dirname(__file__)
-
-    if _sys.platform == 'linux':
-        bundled_libs = glob.glob(_os.path.join(package_cwd, '*.so.*'))
-
-        def get_symlink_path(hard_path):
-            return hard_path.rsplit('.', 1)[0]
-    else:
-        bundled_libs = glob.glob(_os.path.join(package_cwd, '*.*.dylib'))
-
-        def get_symlink_path(hard_path):
-            return '.'.join((hard_path.rsplit('.', 2)[0], 'dylib'))
-
-    for lib_hard_path in bundled_libs:
-        symlink_path = get_symlink_path(lib_hard_path)
-        if _os.path.exists(symlink_path):
-            continue
-        try:
-            _os.symlink(lib_hard_path, symlink_path)
-        except PermissionError:
-            print("Tried creating symlink {}. If you need to link to "
-                  "bundled shared libraries, run "
-                  "pyarrow.create_library_symlinks() as root")
-
-
-def get_library_dirs():
-    """
-    Return a list of directories likely to contain Arrow C++ libraries for
-    linking C or Cython extensions using pyarrow
-    """
-    package_cwd = _os.path.dirname(__file__)
-    library_dirs = [package_cwd]
-
-    def append_library_dir(library_dir):
-        if library_dir not in library_dirs:
-            library_dirs.append(library_dir)
-
-    # Search library paths via pkg-config. This is necessary if the user
-    # installed libarrow and the other shared libraries manually and they
-    # are not shipped inside the pyarrow package (see also ARROW-2976).
-    for pkgname in ["arrow", "arrow_python"]:
-        if _has_pkg_config(pkgname):
-            library_dir = _read_pkg_config_variable(pkgname,
-                                                    ["--libs-only-L"])
-            # pkg-config output could be empty if Arrow is installed
-            # as a system package.
-            if library_dir:
-                if not library_dir.startswith("-L"):
-                    raise ValueError(
-                        "pkg-config --libs-only-L returned unexpected "
-                        "value {!r}".format(library_dir))
-                append_library_dir(library_dir[2:])
-
-    if _sys.platform == 'win32':
-        # TODO(wesm): Is this necessary, or does setuptools within a conda
-        # installation add Library\lib to the linker path for MSVC?
-        python_base_install = _os.path.dirname(_sys.executable)
-        library_dir = _os.path.join(python_base_install, 'Library', 'lib')
-
-        if _os.path.exists(_os.path.join(library_dir, 'arrow.lib')):
-            append_library_dir(library_dir)
-
-    # ARROW-4074: Allow for ARROW_HOME to be set to some other directory
-    if _os.environ.get('ARROW_HOME'):
-        append_library_dir(_os.path.join(_os.environ['ARROW_HOME'], 'lib'))
-    else:
-        # Python wheels bundle the Arrow libraries in the pyarrow directory.
-        append_library_dir(_os.path.dirname(_os.path.abspath(__file__)))
-
-    return library_dirs
diff --git a/python/pyarrow/_compute.pxd b/python/pyarrow/_compute.pxd
deleted file mode 100644
index e187ed7..0000000
--- a/python/pyarrow/_compute.pxd
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: language_level = 3
-
-from pyarrow.lib cimport *
-from pyarrow.includes.common cimport *
-from pyarrow.includes.libarrow cimport *
-
-
-cdef class FunctionOptions(_Weakrefable):
-
-    cdef const CFunctionOptions* get_options(self) except NULL
diff --git a/python/pyarrow/_compute.pyx b/python/pyarrow/_compute.pyx
deleted file mode 100644
index 1515bdc..0000000
--- a/python/pyarrow/_compute.pyx
+++ /dev/null
@@ -1,1092 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: language_level = 3
-
-from cython.operator cimport dereference as deref
-
-from collections import namedtuple
-
-from pyarrow.lib import frombytes, tobytes, ordered_dict
-from pyarrow.lib cimport *
-from pyarrow.includes.libarrow cimport *
-import pyarrow.lib as lib
-
-import numpy as np
-
-
-cdef wrap_scalar_function(const shared_ptr[CFunction]& sp_func):
-    """
-    Wrap a C++ scalar Function in a ScalarFunction object.
-    """
-    cdef ScalarFunction func = ScalarFunction.__new__(ScalarFunction)
-    func.init(sp_func)
-    return func
-
-
-cdef wrap_vector_function(const shared_ptr[CFunction]& sp_func):
-    """
-    Wrap a C++ vector Function in a VectorFunction object.
-    """
-    cdef VectorFunction func = VectorFunction.__new__(VectorFunction)
-    func.init(sp_func)
-    return func
-
-
-cdef wrap_scalar_aggregate_function(const shared_ptr[CFunction]& sp_func):
-    """
-    Wrap a C++ aggregate Function in a ScalarAggregateFunction object.
-    """
-    cdef ScalarAggregateFunction func = (
-        ScalarAggregateFunction.__new__(ScalarAggregateFunction)
-    )
-    func.init(sp_func)
-    return func
-
-
-cdef wrap_hash_aggregate_function(const shared_ptr[CFunction]& sp_func):
-    """
-    Wrap a C++ aggregate Function in a HashAggregateFunction object.
-    """
-    cdef HashAggregateFunction func = (
-        HashAggregateFunction.__new__(HashAggregateFunction)
-    )
-    func.init(sp_func)
-    return func
-
-
-cdef wrap_meta_function(const shared_ptr[CFunction]& sp_func):
-    """
-    Wrap a C++ meta Function in a MetaFunction object.
-    """
-    cdef MetaFunction func = (
-        MetaFunction.__new__(MetaFunction)
-    )
-    func.init(sp_func)
-    return func
-
-
-cdef wrap_function(const shared_ptr[CFunction]& sp_func):
-    """
-    Wrap a C++ Function in a Function object.
-
-    This dispatches to specialized wrappers depending on the function kind.
-    """
-    if sp_func.get() == NULL:
-        raise ValueError('Function was NULL')
-
-    cdef FunctionKind c_kind = sp_func.get().kind()
-    if c_kind == FunctionKind_SCALAR:
-        return wrap_scalar_function(sp_func)
-    elif c_kind == FunctionKind_VECTOR:
-        return wrap_vector_function(sp_func)
-    elif c_kind == FunctionKind_SCALAR_AGGREGATE:
-        return wrap_scalar_aggregate_function(sp_func)
-    elif c_kind == FunctionKind_HASH_AGGREGATE:
-        return wrap_hash_aggregate_function(sp_func)
-    elif c_kind == FunctionKind_META:
-        return wrap_meta_function(sp_func)
-    else:
-        raise NotImplementedError("Unknown Function::Kind")
-
-
-cdef wrap_scalar_kernel(const CScalarKernel* c_kernel):
-    if c_kernel == NULL:
-        raise ValueError('Kernel was NULL')
-    cdef ScalarKernel kernel = ScalarKernel.__new__(ScalarKernel)
-    kernel.init(c_kernel)
-    return kernel
-
-
-cdef wrap_vector_kernel(const CVectorKernel* c_kernel):
-    if c_kernel == NULL:
-        raise ValueError('Kernel was NULL')
-    cdef VectorKernel kernel = VectorKernel.__new__(VectorKernel)
-    kernel.init(c_kernel)
-    return kernel
-
-
-cdef wrap_scalar_aggregate_kernel(const CScalarAggregateKernel* c_kernel):
-    if c_kernel == NULL:
-        raise ValueError('Kernel was NULL')
-    cdef ScalarAggregateKernel kernel = (
-        ScalarAggregateKernel.__new__(ScalarAggregateKernel)
-    )
-    kernel.init(c_kernel)
-    return kernel
-
-
-cdef wrap_hash_aggregate_kernel(const CHashAggregateKernel* c_kernel):
-    if c_kernel == NULL:
-        raise ValueError('Kernel was NULL')
-    cdef HashAggregateKernel kernel = (
-        HashAggregateKernel.__new__(HashAggregateKernel)
-    )
-    kernel.init(c_kernel)
-    return kernel
-
-
-cdef class Kernel(_Weakrefable):
-    """
-    A kernel object.
-
-    Kernels handle the execution of a Function for a certain signature.
-    """
-
-    def __init__(self):
-        raise TypeError("Do not call {}'s constructor directly"
-                        .format(self.__class__.__name__))
-
-
-cdef class ScalarKernel(Kernel):
-    cdef:
-        const CScalarKernel* kernel
-
-    cdef void init(self, const CScalarKernel* kernel) except *:
-        self.kernel = kernel
-
-    def __repr__(self):
-        return ("ScalarKernel<{}>"
-                .format(frombytes(self.kernel.signature.get().ToString())))
-
-
-cdef class VectorKernel(Kernel):
-    cdef:
-        const CVectorKernel* kernel
-
-    cdef void init(self, const CVectorKernel* kernel) except *:
-        self.kernel = kernel
-
-    def __repr__(self):
-        return ("VectorKernel<{}>"
-                .format(frombytes(self.kernel.signature.get().ToString())))
-
-
-cdef class ScalarAggregateKernel(Kernel):
-    cdef:
-        const CScalarAggregateKernel* kernel
-
-    cdef void init(self, const CScalarAggregateKernel* kernel) except *:
-        self.kernel = kernel
-
-    def __repr__(self):
-        return ("ScalarAggregateKernel<{}>"
-                .format(frombytes(self.kernel.signature.get().ToString())))
-
-
-cdef class HashAggregateKernel(Kernel):
-    cdef:
-        const CHashAggregateKernel* kernel
-
-    cdef void init(self, const CHashAggregateKernel* kernel) except *:
-        self.kernel = kernel
-
-    def __repr__(self):
-        return ("HashAggregateKernel<{}>"
-                .format(frombytes(self.kernel.signature.get().ToString())))
-
-
-FunctionDoc = namedtuple(
-    "FunctionDoc",
-    ("summary", "description", "arg_names", "options_class"))
-
-
-cdef class Function(_Weakrefable):
-    """
-    A compute function.
-
-    A function implements a certain logical computation over a range of
-    possible input signatures.  Each signature accepts a range of input
-    types and is implemented by a given Kernel.
-
-    Functions can be of different kinds:
-
-    * "scalar" functions apply an item-wise computation over all items
-      of their inputs.  Each item in the output only depends on the values
-      of the inputs at the same position.  Examples: addition, comparisons,
-      string predicates...
-
-    * "vector" functions apply a collection-wise computation, such that
-      each item in the output may depend on the values of several items
-      in each input.  Examples: dictionary encoding, sorting, extracting
-      unique values...
-
-    * "scalar_aggregate" functions reduce the dimensionality of the inputs by
-      applying a reduction function.  Examples: sum, min_max, mode...
-
-    * "hash_aggregate" functions apply a reduction function to an input
-      subdivided by grouping criteria.  They cannot be called directly.
-      Examples: hash_sum, hash_min_max...
-
-    * "meta" functions dispatch to other functions.
-    """
-    cdef:
-        shared_ptr[CFunction] sp_func
-        CFunction* base_func
-
-    def __init__(self):
-        raise TypeError("Do not call {}'s constructor directly"
-                        .format(self.__class__.__name__))
-
-    cdef void init(self, const shared_ptr[CFunction]& sp_func) except *:
-        self.sp_func = sp_func
-        self.base_func = sp_func.get()
-
-    def __repr__(self):
-        return ("arrow.compute.Function<name={}, kind={}, "
-                "arity={}, num_kernels={}>"
-                ).format(self.name, self.kind, self.arity, self.num_kernels)
-
-    def __reduce__(self):
-        # Reduction uses the global registry
-        return get_function, (self.name,)
-
-    @property
-    def name(self):
-        """
-        The function name.
-        """
-        return frombytes(self.base_func.name())
-
-    @property
-    def arity(self):
-        """
-        The function arity.
-
-        If Ellipsis (i.e. `...`) is returned, the function takes a variable
-        number of arguments.
-        """
-        cdef CArity arity = self.base_func.arity()
-        if arity.is_varargs:
-            return ...
-        else:
-            return arity.num_args
-
-    @property
-    def kind(self):
-        """
-        The function kind.
-        """
-        cdef FunctionKind c_kind = self.base_func.kind()
-        if c_kind == FunctionKind_SCALAR:
-            return 'scalar'
-        elif c_kind == FunctionKind_VECTOR:
-            return 'vector'
-        elif c_kind == FunctionKind_SCALAR_AGGREGATE:
-            return 'scalar_aggregate'
-        elif c_kind == FunctionKind_HASH_AGGREGATE:
-            return 'hash_aggregate'
-        elif c_kind == FunctionKind_META:
-            return 'meta'
-        else:
-            raise NotImplementedError("Unknown Function::Kind")
-
-    @property
-    def _doc(self):
-        """
-        The C++-like function documentation (for internal use).
-        """
-        cdef CFunctionDoc c_doc = self.base_func.doc()
-
-        return FunctionDoc(frombytes(c_doc.summary),
-                           frombytes(c_doc.description),
-                           [frombytes(s) for s in c_doc.arg_names],
-                           frombytes(c_doc.options_class))
-
-    @property
-    def num_kernels(self):
-        """
-        The number of kernels implementing this function.
-        """
-        return self.base_func.num_kernels()
-
-    def call(self, args, FunctionOptions options=None,
-             MemoryPool memory_pool=None):
-        """
-        Call the function on the given arguments.
-        """
-        cdef:
-            const CFunctionOptions* c_options = NULL
-            CMemoryPool* pool = maybe_unbox_memory_pool(memory_pool)
-            CExecContext c_exec_ctx = CExecContext(pool)
-            vector[CDatum] c_args
-            CDatum result
-
-        _pack_compute_args(args, &c_args)
-
-        if options is not None:
-            c_options = options.get_options()
-
-        with nogil:
-            result = GetResultValue(self.base_func.Execute(c_args,
-                                                           c_options,
-                                                           &c_exec_ctx))
-
-        return wrap_datum(result)
-
-
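For reference, the Function wrapper above was surfaced through pyarrow.compute; a minimal sketch of introspecting and calling one (assuming a pyarrow build that still shipped this module, with get_function re-exported as pyarrow.compute.get_function):

    import pyarrow as pa
    import pyarrow.compute as pc

    # Look up "add" in the global registry and inspect the wrapper.
    add = pc.get_function("add")
    print(add.name, add.kind, add.arity, add.num_kernels)

    # Function.call packs the arguments into C++ Datums, dispatches to a
    # kernel matching the input types, and executes it.
    print(add.call([pa.array([1, 2, 3]), pa.array([10, 20, 30])]))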
-cdef class ScalarFunction(Function):
-    cdef:
-        const CScalarFunction* func
-
-    cdef void init(self, const shared_ptr[CFunction]& sp_func) except *:
-        Function.init(self, sp_func)
-        self.func = <const CScalarFunction*> sp_func.get()
-
-    @property
-    def kernels(self):
-        """
-        The kernels implementing this function.
-        """
-        cdef vector[const CScalarKernel*] kernels = self.func.kernels()
-        return [wrap_scalar_kernel(k) for k in kernels]
-
-
-cdef class VectorFunction(Function):
-    cdef:
-        const CVectorFunction* func
-
-    cdef void init(self, const shared_ptr[CFunction]& sp_func) except *:
-        Function.init(self, sp_func)
-        self.func = <const CVectorFunction*> sp_func.get()
-
-    @property
-    def kernels(self):
-        """
-        The kernels implementing this function.
-        """
-        cdef vector[const CVectorKernel*] kernels = self.func.kernels()
-        return [wrap_vector_kernel(k) for k in kernels]
-
-
-cdef class ScalarAggregateFunction(Function):
-    cdef:
-        const CScalarAggregateFunction* func
-
-    cdef void init(self, const shared_ptr[CFunction]& sp_func) except *:
-        Function.init(self, sp_func)
-        self.func = <const CScalarAggregateFunction*> sp_func.get()
-
-    @property
-    def kernels(self):
-        """
-        The kernels implementing this function.
-        """
-        cdef vector[const CScalarAggregateKernel*] kernels = (
-            self.func.kernels()
-        )
-        return [wrap_scalar_aggregate_kernel(k) for k in kernels]
-
-
-cdef class HashAggregateFunction(Function):
-    cdef:
-        const CHashAggregateFunction* func
-
-    cdef void init(self, const shared_ptr[CFunction]& sp_func) except *:
-        Function.init(self, sp_func)
-        self.func = <const CHashAggregateFunction*> sp_func.get()
-
-    @property
-    def kernels(self):
-        """
-        The kernels implementing this function.
-        """
-        cdef vector[const CHashAggregateKernel*] kernels = (
-            self.func.kernels()
-        )
-        return [wrap_hash_aggregate_kernel(k) for k in kernels]
-
-
-cdef class MetaFunction(Function):
-    cdef:
-        const CMetaFunction* func
-
-    cdef void init(self, const shared_ptr[CFunction]& sp_func) except *:
-        Function.init(self, sp_func)
-        self.func = <const CMetaFunction*> sp_func.get()
-
-    # Since num_kernels is exposed, also expose a kernels property
-
-    @property
-    def kernels(self):
-        """
-        The kernels implementing this function.
-        """
-        return []
-
-
-cdef _pack_compute_args(object values, vector[CDatum]* out):
-    for val in values:
-        if isinstance(val, (list, np.ndarray)):
-            val = lib.asarray(val)
-
-        if isinstance(val, Array):
-            out.push_back(CDatum((<Array> val).sp_array))
-            continue
-        elif isinstance(val, ChunkedArray):
-            out.push_back(CDatum((<ChunkedArray> val).sp_chunked_array))
-            continue
-        elif isinstance(val, Scalar):
-            out.push_back(CDatum((<Scalar> val).unwrap()))
-            continue
-        elif isinstance(val, RecordBatch):
-            out.push_back(CDatum((<RecordBatch> val).sp_batch))
-            continue
-        elif isinstance(val, Table):
-            out.push_back(CDatum((<Table> val).sp_table))
-            continue
-        else:
-            # Is it a Python scalar?
-            try:
-                scal = lib.scalar(val)
-            except Exception:
-                # Raise dedicated error below
-                pass
-            else:
-                out.push_back(CDatum((<Scalar> scal).unwrap()))
-                continue
-
-        raise TypeError("Got unexpected argument type {} "
-                        "for compute function".format(type(val)))
-
-
-cdef class FunctionRegistry(_Weakrefable):
-    cdef:
-        CFunctionRegistry* registry
-
-    def __init__(self):
-        self.registry = GetFunctionRegistry()
-
-    def list_functions(self):
-        """
-        Return all function names in the registry.
-        """
-        cdef vector[c_string] names = self.registry.GetFunctionNames()
-        return [frombytes(name) for name in names]
-
-    def get_function(self, name):
-        """
-        Look up a function by name in the registry.
-        """
-        cdef:
-            c_string c_name = tobytes(name)
-            shared_ptr[CFunction] func
-        with nogil:
-            func = GetResultValue(self.registry.GetFunction(c_name))
-        return wrap_function(func)
-
-
-cdef FunctionRegistry _global_func_registry = FunctionRegistry()
-
-
-def function_registry():
-    return _global_func_registry
-
-
-def get_function(name):
-    """
-    Get a function by name.
-
-    The function is looked up in the global registry
-    (as returned by `function_registry()`).
-    """
-    return _global_func_registry.get_function(name)
-
-
-def list_functions():
-    """
-    Return all function names in the global registry.
-    """
-    return _global_func_registry.list_functions()
-
-
-def call_function(name, args, options=None, memory_pool=None):
-    """
-    Call a named function.
-
-    The function is looked up in the global registry
-    (as returned by `function_registry()`).
-    """
-    func = _global_func_registry.get_function(name)
-    return func.call(args, options=options, memory_pool=memory_pool)
-
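The module-level helpers above all consult the global registry; a short usage sketch (the coercion of plain lists and scalars is done by _pack_compute_args above):

    import pyarrow.compute as pc

    assert "add" in pc.list_functions()

    # call_function is the generic entry point; plain Python lists and
    # scalars are converted to Arrow arrays and scalars before dispatch.
    total = pc.call_function("add", [[1, 2, 3], 1])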
-
-cdef class FunctionOptions(_Weakrefable):
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        raise NotImplementedError("Unimplemented base options")
-
-
-# NOTE:
-# To properly expose the constructor signature of FunctionOptions
-# subclasses, we use a two-level inheritance:
-# 1. a C extension class that implements option validation and setting
-#    (won't expose function signatures because of
-#     https://github.com/cython/cython/issues/3873)
-# 2. a Python derived class that implements the constructor
-
-cdef class _CastOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CCastOptions] options
-
-    __slots__ = ()  # avoid mistakenly creating attributes
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.options.get()
-
-    def _set_options(self, DataType target_type, allow_int_overflow,
-                     allow_time_truncate, allow_time_overflow,
-                     allow_float_truncate, allow_invalid_utf8):
-        self.options.reset(new CCastOptions())
-        self._set_type(target_type)
-        if allow_int_overflow is not None:
-            self.allow_int_overflow = allow_int_overflow
-        if allow_time_truncate is not None:
-            self.allow_time_truncate = allow_time_truncate
-        if allow_time_overflow is not None:
-            self.allow_time_overflow = allow_time_overflow
-        if allow_float_truncate is not None:
-            self.allow_float_truncate = allow_float_truncate
-        if allow_invalid_utf8 is not None:
-            self.allow_invalid_utf8 = allow_invalid_utf8
-
-    def _set_type(self, target_type=None):
-        if target_type is not None:
-            deref(self.options).to_type = (
-                (<DataType> ensure_type(target_type)).sp_type
-            )
-
-    def _set_safe(self):
-        self.options.reset(new CCastOptions(CCastOptions.Safe()))
-
-    def _set_unsafe(self):
-        self.options.reset(new CCastOptions(CCastOptions.Unsafe()))
-
-    def is_safe(self):
-        return not (
-            deref(self.options).allow_int_overflow or
-            deref(self.options).allow_time_truncate or
-            deref(self.options).allow_time_overflow or
-            deref(self.options).allow_float_truncate or
-            deref(self.options).allow_invalid_utf8
-        )
-
-    @property
-    def allow_int_overflow(self):
-        return deref(self.options).allow_int_overflow
-
-    @allow_int_overflow.setter
-    def allow_int_overflow(self, bint flag):
-        deref(self.options).allow_int_overflow = flag
-
-    @property
-    def allow_time_truncate(self):
-        return deref(self.options).allow_time_truncate
-
-    @allow_time_truncate.setter
-    def allow_time_truncate(self, bint flag):
-        deref(self.options).allow_time_truncate = flag
-
-    @property
-    def allow_time_overflow(self):
-        return deref(self.options).allow_time_overflow
-
-    @allow_time_overflow.setter
-    def allow_time_overflow(self, bint flag):
-        deref(self.options).allow_time_overflow = flag
-
-    @property
-    def allow_float_truncate(self):
-        return deref(self.options).allow_float_truncate
-
-    @allow_float_truncate.setter
-    def allow_float_truncate(self, bint flag):
-        deref(self.options).allow_float_truncate = flag
-
-    @property
-    def allow_invalid_utf8(self):
-        return deref(self.options).allow_invalid_utf8
-
-    @allow_invalid_utf8.setter
-    def allow_invalid_utf8(self, bint flag):
-        deref(self.options).allow_invalid_utf8 = flag
-
-
-class CastOptions(_CastOptions):
-
-    def __init__(self, target_type=None, *, allow_int_overflow=None,
-                 allow_time_truncate=None, allow_time_overflow=None,
-                 allow_float_truncate=None, allow_invalid_utf8=None):
-        self._set_options(target_type, allow_int_overflow,
-                          allow_time_truncate, allow_time_overflow,
-                          allow_float_truncate, allow_invalid_utf8)
-
-    @staticmethod
-    def safe(target_type=None):
-        self = CastOptions()
-        self._set_safe()
-        self._set_type(target_type)
-        return self
-
-    @staticmethod
-    def unsafe(target_type=None):
-        self = CastOptions()
-        self._set_unsafe()
-        self._set_type(target_type)
-        return self
-
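A usage sketch for CastOptions above, assuming it was re-exported as pyarrow.compute.CastOptions (as the pyarrow package did) and using the "cast" meta function from the registry:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array([1.5, 2.7, None])

    # CastOptions.safe() rejects lossy conversions; unsafe() enables all
    # of the allow_* flags shown above, so 1.5 truncates to 1.
    opts = pc.CastOptions.unsafe(pa.int32())
    ints = pc.call_function("cast", [arr], options=opts)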
-
-cdef class _MatchSubstringOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CMatchSubstringOptions] match_substring_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.match_substring_options.get()
-
-    def _set_options(self, pattern):
-        self.match_substring_options.reset(
-            new CMatchSubstringOptions(tobytes(pattern)))
-
-
-class MatchSubstringOptions(_MatchSubstringOptions):
-    def __init__(self, pattern):
-        self._set_options(pattern)
-
-
-cdef class _TrimOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CTrimOptions] trim_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.trim_options.get()
-
-    def _set_options(self, characters):
-        self.trim_options.reset(
-            new CTrimOptions(tobytes(characters)))
-
-
-class TrimOptions(_TrimOptions):
-    def __init__(self, characters):
-        self._set_options(characters)
-
-
-cdef class _ReplaceSubstringOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CReplaceSubstringOptions] replace_substring_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.replace_substring_options.get()
-
-    def _set_options(self, pattern, replacement, max_replacements):
-        self.replace_substring_options.reset(
-            new CReplaceSubstringOptions(tobytes(pattern),
-                                         tobytes(replacement),
-                                         max_replacements)
-        )
-
-
-class ReplaceSubstringOptions(_ReplaceSubstringOptions):
-    def __init__(self, pattern, replacement, max_replacements=-1):
-        self._set_options(pattern, replacement, max_replacements)
-
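A sketch of the string options above in use; the kernel names "match_substring" and "replace_substring" are the registry names of the C++ kernels of that era, not defined in this file:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array(["foo", "barfoo", None])

    # Each string kernel is paired with its own options class.
    hits = pc.call_function(
        "match_substring", [arr],
        options=pc.MatchSubstringOptions("foo"))
    swapped = pc.call_function(
        "replace_substring", [arr],
        options=pc.ReplaceSubstringOptions("foo", "baz"))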
-
-cdef class _FilterOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CFilterOptions] filter_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.filter_options.get()
-
-    def _set_options(self, null_selection_behavior):
-        if null_selection_behavior == 'drop':
-            self.filter_options.reset(
-                new CFilterOptions(CFilterNullSelectionBehavior_DROP))
-        elif null_selection_behavior == 'emit_null':
-            self.filter_options.reset(
-                new CFilterOptions(CFilterNullSelectionBehavior_EMIT_NULL))
-        else:
-            raise ValueError(
-                '"{}" is not a valid null_selection_behavior'
-                .format(null_selection_behavior))
-
-
-class FilterOptions(_FilterOptions):
-    def __init__(self, null_selection_behavior='drop'):
-        self._set_options(null_selection_behavior)
-
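A sketch of the two null_selection_behavior modes; the "filter" kernel takes the values and the boolean mask as its two arguments:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array([1, 2, 3])
    mask = pa.array([True, None, False])

    # 'drop' (the default) skips null mask slots; 'emit_null' keeps them
    # in the output as nulls.
    kept = pc.call_function(
        "filter", [arr, mask], options=pc.FilterOptions("emit_null"))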
-
-cdef class _DictionaryEncodeOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CDictionaryEncodeOptions] dictionary_encode_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.dictionary_encode_options.get()
-
-    def _set_options(self, null_encoding_behavior):
-        if null_encoding_behavior == 'encode':
-            self.dictionary_encode_options.reset(
-                new CDictionaryEncodeOptions(
-                    CDictionaryEncodeNullEncodingBehavior_ENCODE))
-        elif null_encoding_behavior == 'mask':
-            self.dictionary_encode_options.reset(
-                new CDictionaryEncodeOptions(
-                    CDictionaryEncodeNullEncodingBehavior_MASK))
-        else:
-            raise ValueError('"{}" is not a valid null_encoding_behavior'
-                             .format(null_encoding_behavior))
-
-
-class DictionaryEncodeOptions(_DictionaryEncodeOptions):
-    def __init__(self, null_encoding_behavior='mask'):
-        self._set_options(null_encoding_behavior)
-
-
-cdef class _TakeOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CTakeOptions] take_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.take_options.get()
-
-    def _set_options(self, boundscheck):
-        self.take_options.reset(new CTakeOptions(boundscheck))
-
-
-class TakeOptions(_TakeOptions):
-    def __init__(self, *, boundscheck=True):
-        self._set_options(boundscheck)
-
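A sketch of TakeOptions; turning off boundscheck skips index validation, which is faster but undefined on out-of-range indices:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array(["a", "b", "c"])
    idx = pa.array([2, 0])
    taken = pc.call_function(
        "take", [arr, idx], options=pc.TakeOptions(boundscheck=False))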
-
-cdef class _PartitionNthOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CPartitionNthOptions] partition_nth_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.partition_nth_options.get()
-
-    def _set_options(self, int64_t pivot):
-        self.partition_nth_options.reset(new CPartitionNthOptions(pivot))
-
-
-class PartitionNthOptions(_PartitionNthOptions):
-    def __init__(self, int64_t pivot):
-        self._set_options(pivot)
-
-
-cdef class _ProjectOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CProjectOptions] project_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.project_options.get()
-
-    def _set_options(self, field_names):
-        cdef:
-            vector[c_string] c_field_names
-        for n in field_names:
-            c_field_names.push_back(tobytes(n))
-        self.project_options.reset(new CProjectOptions(c_field_names))
-
-
-class ProjectOptions(_ProjectOptions):
-    def __init__(self, field_names):
-        self._set_options(field_names)
-
-
-cdef class _MinMaxOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CMinMaxOptions] min_max_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.min_max_options.get()
-
-    def _set_options(self, null_handling):
-        if null_handling == 'skip':
-            self.min_max_options.reset(
-                new CMinMaxOptions(CMinMaxMode_SKIP))
-        elif null_handling == 'emit_null':
-            self.min_max_options.reset(
-                new CMinMaxOptions(CMinMaxMode_EMIT_NULL))
-        else:
-            raise ValueError(
-                '{!r} is not a valid null_handling'
-                .format(null_handling))
-
-
-class MinMaxOptions(_MinMaxOptions):
-    def __init__(self, null_handling='skip'):
-        self._set_options(null_handling)
-
-
-cdef class _CountOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CCountOptions] count_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.count_options.get()
-
-    def _set_options(self, count_mode):
-        if count_mode == 'count_null':
-            self.count_options.reset(
-                new CCountOptions(CCountMode_COUNT_NULL))
-        elif count_mode == 'count_non_null':
-            self.count_options.reset(
-                new CCountOptions(CCountMode_COUNT_NON_NULL))
-        else:
-            raise ValueError(
-                '{!r} is not a valid count_mode'
-                .format(count_mode))
-
-
-class CountOptions(_CountOptions):
-    def __init__(self, count_mode='count_non_null'):
-        self._set_options(count_mode)
-
-
-cdef class _ModeOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CModeOptions] mode_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.mode_options.get()
-
-    def _set_options(self, n):
-        self.mode_options.reset(new CModeOptions(n))
-
-
-class ModeOptions(_ModeOptions):
-    def __init__(self, n=1):
-        self._set_options(n)
-
-
-cdef class _SetLookupOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CSetLookupOptions] set_lookup_options
-        unique_ptr[CDatum] valset
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.set_lookup_options.get()
-
-    def _set_options(self, value_set, c_bool skip_nulls):
-        if isinstance(value_set, Array):
-            self.valset.reset(new CDatum((<Array> value_set).sp_array))
-        elif isinstance(value_set, ChunkedArray):
-            self.valset.reset(
-                new CDatum((<ChunkedArray> value_set).sp_chunked_array)
-            )
-        elif isinstance(value_set, Scalar):
-            self.valset.reset(new CDatum((<Scalar> value_set).unwrap()))
-        else:
-            raise ValueError('"{}" is not a valid value_set'.format(value_set))
-
-        self.set_lookup_options.reset(
-            new CSetLookupOptions(deref(self.valset), skip_nulls)
-        )
-
-
-class SetLookupOptions(_SetLookupOptions):
-    def __init__(self, *, value_set, skip_nulls=False):
-        self._set_options(value_set, skip_nulls)
-
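A sketch of SetLookupOptions with "is_in", one of the set-lookup kernels these options were written for (the kernel name comes from the C++ registry, not this file):

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array(["a", "b", None])
    opts = pc.SetLookupOptions(value_set=pa.array(["a", "c"]),
                               skip_nulls=True)
    membership = pc.call_function("is_in", [arr], options=opts)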
-
-cdef class _StrptimeOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CStrptimeOptions] strptime_options
-        TimeUnit time_unit
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.strptime_options.get()
-
-    def _set_options(self, format, unit):
-        if unit == 's':
-            self.time_unit = TimeUnit_SECOND
-        elif unit == 'ms':
-            self.time_unit = TimeUnit_MILLI
-        elif unit == 'us':
-            self.time_unit = TimeUnit_MICRO
-        elif unit == 'ns':
-            self.time_unit = TimeUnit_NANO
-        else:
-            raise ValueError('"{}" is not a valid time unit'.format(unit))
-
-        self.strptime_options.reset(
-            new CStrptimeOptions(tobytes(format), self.time_unit)
-        )
-
-
-class StrptimeOptions(_StrptimeOptions):
-    def __init__(self, format, unit):
-        self._set_options(format, unit)
-
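A sketch of StrptimeOptions with the "strptime" kernel; the format string follows the C strptime() conventions and the unit is one of 's', 'ms', 'us', 'ns' as validated above:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array(["2021-04-18 14:40:10"])
    opts = pc.StrptimeOptions("%Y-%m-%d %H:%M:%S", "s")
    ts = pc.call_function("strptime", [arr], options=opts)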
-
-cdef class _VarianceOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CVarianceOptions] variance_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.variance_options.get()
-
-    def _set_options(self, ddof):
-        self.variance_options.reset(new CVarianceOptions(ddof))
-
-
-class VarianceOptions(_VarianceOptions):
-    def __init__(self, *, ddof=0):
-        self._set_options(ddof)
-
-
-cdef class _SplitOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CSplitOptions] split_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.split_options.get()
-
-    def _set_options(self, max_splits, reverse):
-        self.split_options.reset(
-            new CSplitOptions(max_splits, reverse))
-
-
-class SplitOptions(_SplitOptions):
-    def __init__(self, *, max_splits=-1, reverse=False):
-        self._set_options(max_splits, reverse)
-
-
-cdef class _SplitPatternOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CSplitPatternOptions] split_pattern_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.split_pattern_options.get()
-
-    def _set_options(self, pattern, max_splits, reverse):
-        self.split_pattern_options.reset(
-            new CSplitPatternOptions(tobytes(pattern), max_splits, reverse))
-
-
-class SplitPatternOptions(_SplitPatternOptions):
-    def __init__(self, *, pattern, max_splits=-1, reverse=False):
-        self._set_options(pattern, max_splits, reverse)
-
-
-cdef class _ArraySortOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CArraySortOptions] array_sort_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.array_sort_options.get()
-
-    def _set_options(self, order):
-        if order == "ascending":
-            self.array_sort_options.reset(
-                new CArraySortOptions(CSortOrder_Ascending))
-        elif order == "descending":
-            self.array_sort_options.reset(
-                new CArraySortOptions(CSortOrder_Descending))
-        else:
-            raise ValueError(
-                "{!r} is not a valid order".format(order)
-            )
-
-
-class ArraySortOptions(_ArraySortOptions):
-    def __init__(self, *, order='ascending'):
-        self._set_options(order)
-
-
-cdef class _SortOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CSortOptions] sort_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.sort_options.get()
-
-    def _set_options(self, sort_keys):
-        cdef:
-            vector[CSortKey] c_sort_keys
-            c_string c_name
-            CSortOrder c_order
-
-        for name, order in sort_keys:
-            if order == "ascending":
-                c_order = CSortOrder_Ascending
-            elif order == "descending":
-                c_order = CSortOrder_Descending
-            else:
-                raise ValueError(
-                    "{!r} is not a valid order".format(order)
-                )
-            c_name = tobytes(name)
-            c_sort_keys.push_back(CSortKey(c_name, c_order))
-
-        self.sort_options.reset(new CSortOptions(c_sort_keys))
-
-
-class SortOptions(_SortOptions):
-    def __init__(self, sort_keys=None):
-        if sort_keys is None:
-            sort_keys = []
-        self._set_options(sort_keys)
-
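A sketch of multi-key sorting with SortOptions, assuming the table-level "sort_indices" kernel available in Arrow C++ at the time:

    import pyarrow as pa
    import pyarrow.compute as pc

    table = pa.table({"a": [2, 1, 2], "b": [3, 2, 1]})
    opts = pc.SortOptions(
        sort_keys=[("a", "ascending"), ("b", "descending")])

    # sort_indices returns the permutation that orders the input.
    indices = pc.call_function("sort_indices", [table], options=opts)
    ordered = table.take(indices)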
-
-cdef class _QuantileOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CQuantileOptions] quantile_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.quantile_options.get()
-
-    def _set_options(self, quantiles, interp):
-        interp_dict = {
-            'linear': CQuantileInterp_LINEAR,
-            'lower': CQuantileInterp_LOWER,
-            'higher': CQuantileInterp_HIGHER,
-            'nearest': CQuantileInterp_NEAREST,
-            'midpoint': CQuantileInterp_MIDPOINT,
-        }
-        if interp not in interp_dict:
-            raise ValueError(
-                '{!r} is not a valid interpolation'
-                .format(interp))
-        self.quantile_options.reset(
-            new CQuantileOptions(quantiles, interp_dict[interp]))
-
-
-class QuantileOptions(_QuantileOptions):
-    def __init__(self, *, q=0.5, interpolation='linear'):
-        if not isinstance(q, (list, tuple, np.ndarray)):
-            q = [q]
-        self._set_options(q, interpolation)
-
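A sketch of QuantileOptions with the "quantile" kernel (a registry name assumed from the C++ side); q accepts a scalar or a sequence, as the constructor above shows:

    import pyarrow as pa
    import pyarrow.compute as pc

    arr = pa.array([1.0, 2.0, 3.0, 4.0])
    opts = pc.QuantileOptions(q=[0.25, 0.5, 0.75],
                              interpolation="midpoint")
    quartiles = pc.call_function("quantile", [arr], options=opts)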
-
-cdef class _TDigestOptions(FunctionOptions):
-    cdef:
-        unique_ptr[CTDigestOptions] tdigest_options
-
-    cdef const CFunctionOptions* get_options(self) except NULL:
-        return self.tdigest_options.get()
-
-    def _set_options(self, quantiles, delta, buffer_size):
-        self.tdigest_options.reset(
-            new CTDigestOptions(quantiles, delta, buffer_size))
-
-
-class TDigestOptions(_TDigestOptions):
-    def __init__(self, *, q=0.5, delta=100, buffer_size=500):
-        if not isinstance(q, (list, tuple, np.ndarray)):
-            q = [q]
-        self._set_options(q, delta, buffer_size)
diff --git a/python/pyarrow/_csv.pxd b/python/pyarrow/_csv.pxd
deleted file mode 100644
index f8e12f1..0000000
--- a/python/pyarrow/_csv.pxd
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: language_level = 3
-
-from pyarrow.includes.libarrow cimport *
-from pyarrow.lib cimport _Weakrefable
-
-
-cdef class ConvertOptions(_Weakrefable):
-    cdef:
-        CCSVConvertOptions options
-
-    @staticmethod
-    cdef ConvertOptions wrap(CCSVConvertOptions options)
-
-
-cdef class ParseOptions(_Weakrefable):
-    cdef:
-        CCSVParseOptions options
-
-    @staticmethod
-    cdef ParseOptions wrap(CCSVParseOptions options)
-
-
-cdef class ReadOptions(_Weakrefable):
-    cdef:
-        CCSVReadOptions options
-        public object encoding
-
-    @staticmethod
-    cdef ReadOptions wrap(CCSVReadOptions options)
diff --git a/python/pyarrow/_csv.pyx b/python/pyarrow/_csv.pyx
deleted file mode 100644
index a98160c..0000000
--- a/python/pyarrow/_csv.pyx
+++ /dev/null
@@ -1,952 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: profile=False
-# distutils: language = c++
-# cython: embedsignature = True
-# cython: language_level = 3
-
-from cython.operator cimport dereference as deref
-
-import codecs
-from collections.abc import Mapping
-
-from pyarrow.includes.common cimport *
-from pyarrow.includes.libarrow cimport *
-from pyarrow.lib cimport (check_status, Field, MemoryPool, Schema,
-                          RecordBatchReader, ensure_type,
-                          maybe_unbox_memory_pool, get_input_stream,
-                          get_writer, native_transcoding_input_stream,
-                          pyarrow_unwrap_batch, pyarrow_unwrap_table,
-                          pyarrow_wrap_schema, pyarrow_wrap_table,
-                          pyarrow_wrap_data_type, pyarrow_unwrap_data_type,
-                          Table, RecordBatch, StopToken)
-from pyarrow.lib import frombytes, tobytes, SignalStopHandler
-from pyarrow.util import _stringify_path
-
-
-cdef unsigned char _single_char(s) except 0:
-    val = ord(s)
-    if val == 0 or val > 127:
-        raise ValueError("Expecting an ASCII character")
-    return <unsigned char> val
-
-
-cdef class ReadOptions(_Weakrefable):
-    """
-    Options for reading CSV files.
-
-    Parameters
-    ----------
-    use_threads : bool, optional (default True)
-        Whether to use multiple threads to accelerate reading
-    block_size : int, optional
-        How many bytes to process at a time from the input stream.
-        This will determine multi-threading granularity as well as
-        the size of individual chunks in the Table.
-    skip_rows : int, optional (default 0)
-        The number of rows to skip before the column names (if any)
-        and the CSV data.
-    column_names : list, optional
-        The column names of the target table.  If empty, fall back on
-        `autogenerate_column_names`.
-    autogenerate_column_names : bool, optional (default False)
-        Whether to autogenerate column names if `column_names` is empty.
-        If true, column names will be of the form "f0", "f1"...
-        If false, column names will be read from the first CSV row
-        after `skip_rows`.
-    encoding : str, optional (default 'utf8')
-        The character encoding of the CSV data.  Columns that cannot
-        be decoded using this encoding can still be read as Binary.
-    """
-
-    # Avoid mistakenly creating attributes
-    __slots__ = ()
-
-    def __init__(self, *, use_threads=None, block_size=None, skip_rows=None,
-                 column_names=None, autogenerate_column_names=None,
-                 encoding='utf8'):
-        self.options = CCSVReadOptions.Defaults()
-        if use_threads is not None:
-            self.use_threads = use_threads
-        if block_size is not None:
-            self.block_size = block_size
-        if skip_rows is not None:
-            self.skip_rows = skip_rows
-        if column_names is not None:
-            self.column_names = column_names
-        if autogenerate_column_names is not None:
-            self.autogenerate_column_names = autogenerate_column_names
-        # Python-specific option
-        self.encoding = encoding
-
-    @property
-    def use_threads(self):
-        """
-        Whether to use multiple threads to accelerate reading.
-        """
-        return self.options.use_threads
-
-    @use_threads.setter
-    def use_threads(self, value):
-        self.options.use_threads = value
-
-    @property
-    def block_size(self):
-        """
-        How many bytes to process at a time from the input stream.
-        This will determine multi-threading granularity as well as
-        the size of individual chunks in the Table.
-        """
-        return self.options.block_size
-
-    @block_size.setter
-    def block_size(self, value):
-        self.options.block_size = value
-
-    @property
-    def skip_rows(self):
-        """
-        The number of rows to skip before the column names (if any)
-        and the CSV data.
-        """
-        return self.options.skip_rows
-
-    @skip_rows.setter
-    def skip_rows(self, value):
-        self.options.skip_rows = value
-
-    @property
-    def column_names(self):
-        """
-        The column names of the target table.  If empty, fall back on
-        `autogenerate_column_names`.
-        """
-        return [frombytes(s) for s in self.options.column_names]
-
-    @column_names.setter
-    def column_names(self, value):
-        self.options.column_names.clear()
-        for item in value:
-            self.options.column_names.push_back(tobytes(item))
-
-    @property
-    def autogenerate_column_names(self):
-        """
-        Whether to autogenerate column names if `column_names` is empty.
-        If true, column names will be of the form "f0", "f1"...
-        If false, column names will be read from the first CSV row
-        after `skip_rows`.
-        """
-        return self.options.autogenerate_column_names
-
-    @autogenerate_column_names.setter
-    def autogenerate_column_names(self, value):
-        self.options.autogenerate_column_names = value
-
-    def equals(self, ReadOptions other):
-        return (
-            self.use_threads == other.use_threads and
-            self.block_size == other.block_size and
-            self.skip_rows == other.skip_rows and
-            self.column_names == other.column_names and
-            self.autogenerate_column_names ==
-            other.autogenerate_column_names and
-            self.encoding == other.encoding
-        )
-
-    @staticmethod
-    cdef ReadOptions wrap(CCSVReadOptions options):
-        out = ReadOptions()
-        out.options = options
-        out.encoding = 'utf8'  # No way to know this
-        return out
-
-    def __getstate__(self):
-        return (self.use_threads, self.block_size, self.skip_rows,
-                self.column_names, self.autogenerate_column_names,
-                self.encoding)
-
-    def __setstate__(self, state):
-        (self.use_threads, self.block_size, self.skip_rows,
-         self.column_names, self.autogenerate_column_names,
-         self.encoding) = state
-
-    def __eq__(self, other):
-        try:
-            return self.equals(other)
-        except TypeError:
-            return False
-
-
-cdef class ParseOptions(_Weakrefable):
-    """
-    Options for parsing CSV files.
-
-    Parameters
-    ----------
-    delimiter : 1-character string, optional (default ',')
-        The character delimiting individual cells in the CSV data.
-    quote_char : 1-character string or False, optional (default '"')
-        The character used optionally for quoting CSV values
-        (False if quoting is not allowed).
-    double_quote : bool, optional (default True)
-        Whether two quotes in a quoted CSV value denote a single quote
-        in the data.
-    escape_char : 1-character string or False, optional (default False)
-        The character used optionally for escaping special characters
-        (False if escaping is not allowed).
-    newlines_in_values : bool, optional (default False)
-        Whether newline characters are allowed in CSV values.
-        Setting this to True reduces the performance of multi-threaded
-        CSV reading.
-    ignore_empty_lines : bool, optional (default True)
-        Whether empty lines are ignored in CSV input.
-        If False, an empty line is interpreted as containing a single empty
-        value (assuming a one-column CSV file).
-    """
-    __slots__ = ()
-
-    def __init__(self, *, delimiter=None, quote_char=None, double_quote=None,
-                 escape_char=None, newlines_in_values=None,
-                 ignore_empty_lines=None):
-        self.options = CCSVParseOptions.Defaults()
-        if delimiter is not None:
-            self.delimiter = delimiter
-        if quote_char is not None:
-            self.quote_char = quote_char
-        if double_quote is not None:
-            self.double_quote = double_quote
-        if escape_char is not None:
-            self.escape_char = escape_char
-        if newlines_in_values is not None:
-            self.newlines_in_values = newlines_in_values
-        if ignore_empty_lines is not None:
-            self.ignore_empty_lines = ignore_empty_lines
-
-    @property
-    def delimiter(self):
-        """
-        The character delimiting individual cells in the CSV data.
-        """
-        return chr(self.options.delimiter)
-
-    @delimiter.setter
-    def delimiter(self, value):
-        self.options.delimiter = _single_char(value)
-
-    @property
-    def quote_char(self):
-        """
-        The character used optionally for quoting CSV values
-        (False if quoting is not allowed).
-        """
-        if self.options.quoting:
-            return chr(self.options.quote_char)
-        else:
-            return False
-
-    @quote_char.setter
-    def quote_char(self, value):
-        if value is False:
-            self.options.quoting = False
-        else:
-            self.options.quote_char = _single_char(value)
-            self.options.quoting = True
-
-    @property
-    def double_quote(self):
-        """
-        Whether two quotes in a quoted CSV value denote a single quote
-        in the data.
-        """
-        return self.options.double_quote
-
-    @double_quote.setter
-    def double_quote(self, value):
-        self.options.double_quote = value
-
-    @property
-    def escape_char(self):
-        """
-        The character used optionally for escaping special characters
-        (False if escaping is not allowed).
-        """
-        if self.options.escaping:
-            return chr(self.options.escape_char)
-        else:
-            return False
-
-    @escape_char.setter
-    def escape_char(self, value):
-        if value is False:
-            self.options.escaping = False
-        else:
-            self.options.escape_char = _single_char(value)
-            self.options.escaping = True
-
-    @property
-    def newlines_in_values(self):
-        """
-        Whether newline characters are allowed in CSV values.
-        Setting this to True reduces the performance of multi-threaded
-        CSV reading.
-        """
-        return self.options.newlines_in_values
-
-    @newlines_in_values.setter
-    def newlines_in_values(self, value):
-        self.options.newlines_in_values = value
-
-    @property
-    def ignore_empty_lines(self):
-        """
-        Whether empty lines are ignored in CSV input.
-        If False, an empty line is interpreted as containing a single empty
-        value (assuming a one-column CSV file).
-        """
-        return self.options.ignore_empty_lines
-
-    @ignore_empty_lines.setter
-    def ignore_empty_lines(self, value):
-        self.options.ignore_empty_lines = value
-
-    def equals(self, ParseOptions other):
-        return (
-            self.delimiter == other.delimiter and
-            self.quote_char == other.quote_char and
-            self.double_quote == other.double_quote and
-            self.escape_char == other.escape_char and
-            self.newlines_in_values == other.newlines_in_values and
-            self.ignore_empty_lines == other.ignore_empty_lines
-        )
-
-    @staticmethod
-    cdef ParseOptions wrap(CCSVParseOptions options):
-        out = ParseOptions()
-        out.options = options
-        return out
-
-    def __getstate__(self):
-        return (self.delimiter, self.quote_char, self.double_quote,
-                self.escape_char, self.newlines_in_values,
-                self.ignore_empty_lines)
-
-    def __setstate__(self, state):
-        (self.delimiter, self.quote_char, self.double_quote,
-         self.escape_char, self.newlines_in_values,
-         self.ignore_empty_lines) = state
-
-    def __eq__(self, other):
-        try:
-            return self.equals(other)
-        except TypeError:
-            return False
-
-
-cdef class _ISO8601(_Weakrefable):
-    """
-    A special object indicating ISO-8601 parsing.
-    """
-    __slots__ = ()
-
-    def __str__(self):
-        return 'ISO8601'
-
-    def __eq__(self, other):
-        return isinstance(other, _ISO8601)
-
-
-ISO8601 = _ISO8601()
-
-
-cdef class ConvertOptions(_Weakrefable):
-    """
-    Options for converting CSV data.
-
-    Parameters
-    ----------
-    check_utf8 : bool, optional (default True)
-        Whether to check UTF8 validity of string columns.
-    column_types : pa.Schema or dict, optional
-        Explicitly map column names to column types. Passing this argument
-        disables type inference on the defined columns.
-    null_values : list, optional
-        A sequence of strings that denote nulls in the data
-        (defaults are appropriate in most cases). Note that by default,
-        string columns are not checked for null values. To enable
-        null checking for those, specify ``strings_can_be_null=True``.
-    true_values : list, optional
-        A sequence of strings that denote true booleans in the data
-        (defaults are appropriate in most cases).
-    false_values : list, optional
-        A sequence of strings that denote false booleans in the data
-        (defaults are appropriate in most cases).
-    timestamp_parsers : list, optional
-        A sequence of strptime()-compatible format strings, tried in order
-        when attempting to infer or convert timestamp values (the special
-        value ISO8601() can also be given).  By default, a fast built-in
-        ISO-8601 parser is used.
-    strings_can_be_null : bool, optional (default False)
-        Whether string / binary columns can have null values.
-        If true, then strings in null_values are considered null for
-        string columns.
-        If false, then all strings are valid string values.
-    auto_dict_encode : bool, optional (default False)
-        Whether to try to automatically dict-encode string / binary data.
-        If true, then when type inference detects a string or binary column,
-        it is dict-encoded up to `auto_dict_max_cardinality` distinct values
-        (per chunk), after which it switches to regular encoding.
-        This setting is ignored for non-inferred columns (those in
-        `column_types`).
-    auto_dict_max_cardinality : int, optional
-        The maximum dictionary cardinality for `auto_dict_encode`.
-        This value is per chunk.
-    include_columns : list, optional
-        The names of columns to include in the Table.
-        If empty, the Table will include all columns from the CSV file.
-        If not empty, only these columns will be included, in this order.
-    include_missing_columns : bool, optional (default False)
-        If false, columns in `include_columns` but not in the CSV file will
-        error out.
-        If true, columns in `include_columns` but not in the CSV file will
-        produce a column of nulls (whose type is selected using
-        `column_types`, or null by default).
-        This option is ignored if `include_columns` is empty.
-    """
-    # Avoid mistakenly creating attributes
-    __slots__ = ()
-
-    def __init__(self, *, check_utf8=None, column_types=None, null_values=None,
-                 true_values=None, false_values=None,
-                 strings_can_be_null=None, include_columns=None,
-                 include_missing_columns=None, auto_dict_encode=None,
-                 auto_dict_max_cardinality=None, timestamp_parsers=None):
-        self.options = CCSVConvertOptions.Defaults()
-        if check_utf8 is not None:
-            self.check_utf8 = check_utf8
-        if column_types is not None:
-            self.column_types = column_types
-        if null_values is not None:
-            self.null_values = null_values
-        if true_values is not None:
-            self.true_values = true_values
-        if false_values is not None:
-            self.false_values = false_values
-        if strings_can_be_null is not None:
-            self.strings_can_be_null = strings_can_be_null
-        if include_columns is not None:
-            self.include_columns = include_columns
-        if include_missing_columns is not None:
-            self.include_missing_columns = include_missing_columns
-        if auto_dict_encode is not None:
-            self.auto_dict_encode = auto_dict_encode
-        if auto_dict_max_cardinality is not None:
-            self.auto_dict_max_cardinality = auto_dict_max_cardinality
-        if timestamp_parsers is not None:
-            self.timestamp_parsers = timestamp_parsers
-
-    @property
-    def check_utf8(self):
-        """
-        Whether to check UTF8 validity of string columns.
-        """
-        return self.options.check_utf8
-
-    @check_utf8.setter
-    def check_utf8(self, value):
-        self.options.check_utf8 = value
-
-    @property
-    def strings_can_be_null(self):
-        """
-        Whether string / binary columns can have null values.
-        """
-        return self.options.strings_can_be_null
-
-    @strings_can_be_null.setter
-    def strings_can_be_null(self, value):
-        self.options.strings_can_be_null = value
-
-    @property
-    def column_types(self):
-        """
-        Explicitly map column names to column types.
-        """
-        d = {frombytes(item.first): pyarrow_wrap_data_type(item.second)
-             for item in self.options.column_types}
-        return d
-
-    @column_types.setter
-    def column_types(self, value):
-        cdef:
-            shared_ptr[CDataType] typ
-
-        if isinstance(value, Mapping):
-            value = value.items()
-
-        self.options.column_types.clear()
-        for item in value:
-            if isinstance(item, Field):
-                k = item.name
-                v = item.type
-            else:
-                k, v = item
-            typ = pyarrow_unwrap_data_type(ensure_type(v))
-            assert typ != NULL
-            self.options.column_types[tobytes(k)] = typ
-
-    @property
-    def null_values(self):
-        """
-        A sequence of strings that denote nulls in the data.
-        """
-        return [frombytes(x) for x in self.options.null_values]
-
-    @null_values.setter
-    def null_values(self, value):
-        self.options.null_values = [tobytes(x) for x in value]
-
-    @property
-    def true_values(self):
-        """
-        A sequence of strings that denote true booleans in the data.
-        """
-        return [frombytes(x) for x in self.options.true_values]
-
-    @true_values.setter
-    def true_values(self, value):
-        self.options.true_values = [tobytes(x) for x in value]
-
-    @property
-    def false_values(self):
-        """
-        A sequence of strings that denote false booleans in the data.
-        """
-        return [frombytes(x) for x in self.options.false_values]
-
-    @false_values.setter
-    def false_values(self, value):
-        self.options.false_values = [tobytes(x) for x in value]
-
-    @property
-    def auto_dict_encode(self):
-        """
-        Whether to try to automatically dict-encode string / binary data.
-        """
-        return self.options.auto_dict_encode
-
-    @auto_dict_encode.setter
-    def auto_dict_encode(self, value):
-        self.options.auto_dict_encode = value
-
-    @property
-    def auto_dict_max_cardinality(self):
-        """
-        The maximum dictionary cardinality for `auto_dict_encode`.
-
-        This value is per chunk.
-        """
-        return self.options.auto_dict_max_cardinality
-
-    @auto_dict_max_cardinality.setter
-    def auto_dict_max_cardinality(self, value):
-        self.options.auto_dict_max_cardinality = value
-
-    @property
-    def include_columns(self):
-        """
-        The names of columns to include in the Table.
-
-        If empty, the Table will include all columns from the CSV file.
-        If not empty, only these columns will be included, in this order.
-        """
-        return [frombytes(s) for s in self.options.include_columns]
-
-    @include_columns.setter
-    def include_columns(self, value):
-        self.options.include_columns.clear()
-        for item in value:
-            self.options.include_columns.push_back(tobytes(item))
-
-    @property
-    def include_missing_columns(self):
-        """
-        If false, columns in `include_columns` but not in the CSV file will
-        error out.
-        If true, columns in `include_columns` but not in the CSV file will
-        produce a null column (whose type is selected using `column_types`,
-        or null by default).
-        This option is ignored if `include_columns` is empty.
-        """
-        return self.options.include_missing_columns
-
-    @include_missing_columns.setter
-    def include_missing_columns(self, value):
-        self.options.include_missing_columns = value
-
-    @property
-    def timestamp_parsers(self):
-        """
-        A sequence of strptime()-compatible format strings, tried in order
-        when attempting to infer or convert timestamp values (the special
-        value ISO8601() can also be given).  By default, a fast built-in
-        ISO-8601 parser is used.
-        """
-        cdef:
-            shared_ptr[CTimestampParser] c_parser
-            c_string kind
-
-        parsers = []
-        for c_parser in self.options.timestamp_parsers:
-            kind = deref(c_parser).kind()
-            if kind == b'strptime':
-                parsers.append(frombytes(deref(c_parser).format()))
-            else:
-                assert kind == b'iso8601'
-                parsers.append(ISO8601)
-
-        return parsers
-
-    @timestamp_parsers.setter
-    def timestamp_parsers(self, value):
-        cdef:
-            vector[shared_ptr[CTimestampParser]] c_parsers
-
-        for v in value:
-            if isinstance(v, str):
-                c_parsers.push_back(CTimestampParser.MakeStrptime(tobytes(v)))
-            elif v == ISO8601:
-                c_parsers.push_back(CTimestampParser.MakeISO8601())
-            else:
-                raise TypeError("Expected list of str or ISO8601 objects")
-
-        self.options.timestamp_parsers = move(c_parsers)
-
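-    # Illustrative sketch for the setter above (the format strings are
-    # hypothetical):
-    #
-    #   opts = ConvertOptions()
-    #   opts.timestamp_parsers = [ISO8601, "%Y/%m/%d", "%d.%m.%Y"]
-    #   # ISO8601 selects the fast built-in parser; plain strings are
-    #   # tried in order as strptime() formats
-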
-    @staticmethod
-    cdef ConvertOptions wrap(CCSVConvertOptions options):
-        out = ConvertOptions()
-        out.options = options
-        return out
-
-    def equals(self, ConvertOptions other):
-        return (
-            self.check_utf8 == other.check_utf8 and
-            self.column_types == other.column_types and
-            self.null_values == other.null_values and
-            self.true_values == other.true_values and
-            self.false_values == other.false_values and
-            self.timestamp_parsers == other.timestamp_parsers and
-            self.strings_can_be_null == other.strings_can_be_null and
-            self.auto_dict_encode == other.auto_dict_encode and
-            self.auto_dict_max_cardinality ==
-            other.auto_dict_max_cardinality and
-            self.include_columns == other.include_columns and
-            self.include_missing_columns == other.include_missing_columns
-        )
-
-    def __getstate__(self):
-        return (self.check_utf8, self.column_types, self.null_values,
-                self.true_values, self.false_values, self.timestamp_parsers,
-                self.strings_can_be_null, self.auto_dict_encode,
-                self.auto_dict_max_cardinality, self.include_columns,
-                self.include_missing_columns)
-
-    def __setstate__(self, state):
-        (self.check_utf8, self.column_types, self.null_values,
-         self.true_values, self.false_values, self.timestamp_parsers,
-         self.strings_can_be_null, self.auto_dict_encode,
-         self.auto_dict_max_cardinality, self.include_columns,
-         self.include_missing_columns) = state
-
-    def __eq__(self, other):
-        try:
-            return self.equals(other)
-        except TypeError:
-            return False
-
-
-cdef _get_reader(input_file, ReadOptions read_options,
-                 shared_ptr[CInputStream]* out):
-    use_memory_map = False
-    get_input_stream(input_file, use_memory_map, out)
-    if read_options is not None:
-        out[0] = native_transcoding_input_stream(out[0],
-                                                 read_options.encoding,
-                                                 'utf8')
-
-
-cdef _get_read_options(ReadOptions read_options, CCSVReadOptions* out):
-    if read_options is None:
-        out[0] = CCSVReadOptions.Defaults()
-    else:
-        out[0] = read_options.options
-
-
-cdef _get_parse_options(ParseOptions parse_options, CCSVParseOptions* out):
-    if parse_options is None:
-        out[0] = CCSVParseOptions.Defaults()
-    else:
-        out[0] = parse_options.options
-
-
-cdef _get_convert_options(ConvertOptions convert_options,
-                          CCSVConvertOptions* out):
-    if convert_options is None:
-        out[0] = CCSVConvertOptions.Defaults()
-    else:
-        out[0] = convert_options.options
-
-
-cdef class CSVStreamingReader(RecordBatchReader):
-    """An object that reads record batches incrementally from a CSV file.
-
-    Should not be instantiated directly by user code.
-    """
-    cdef readonly:
-        Schema schema
-
-    def __init__(self):
-        raise TypeError("Do not call {}'s constructor directly, "
-                        "use pyarrow.csv.open_csv() instead."
-                        .format(self.__class__.__name__))
-
-    # Note about cancellation: we cannot create a SignalStopHandler
-    # by default here, as several CSVStreamingReader instances may be
-    # created (including by the same thread).  Handling cancellation
-    # would require having the user pass in the SignalStopHandler
-    # (in addition to solving ARROW-11853).
-
-    cdef _open(self, shared_ptr[CInputStream] stream,
-               CCSVReadOptions c_read_options,
-               CCSVParseOptions c_parse_options,
-               CCSVConvertOptions c_convert_options,
-               MemoryPool memory_pool):
-        cdef:
-            shared_ptr[CSchema] c_schema
-            CIOContext io_context
-
-        io_context = CIOContext(maybe_unbox_memory_pool(memory_pool))
-
-        with nogil:
-            self.reader = <shared_ptr[CRecordBatchReader]> GetResultValue(
-                CCSVStreamingReader.Make(
-                    io_context, stream,
-                    move(c_read_options), move(c_parse_options),
-                    move(c_convert_options)))
-            c_schema = self.reader.get().schema()
-
-        self.schema = pyarrow_wrap_schema(c_schema)
-
-
-def read_csv(input_file, read_options=None, parse_options=None,
-             convert_options=None, MemoryPool memory_pool=None):
-    """
-    Read a Table from a stream of CSV data.
-
-    Parameters
-    ----------
-    input_file: string, path or file-like object
-        The location of CSV data.  If a string or path, and if it ends
-        with a recognized compressed file extension (e.g. ".gz" or ".bz2"),
-        the data is automatically decompressed when reading.
-    read_options: pyarrow.csv.ReadOptions, optional
-        Options for the CSV reader (see pyarrow.csv.ReadOptions constructor
-        for defaults)
-    parse_options: pyarrow.csv.ParseOptions, optional
-        Options for the CSV parser
-        (see pyarrow.csv.ParseOptions constructor for defaults)
-    convert_options: pyarrow.csv.ConvertOptions, optional
-        Options for converting CSV data
-        (see pyarrow.csv.ConvertOptions constructor for defaults)
-    memory_pool: MemoryPool, optional
-        Pool to allocate Table memory from
-
-    Returns
-    -------
-    :class:`pyarrow.Table`
-        Contents of the CSV file as an in-memory table.
-    """
-    cdef:
-        shared_ptr[CInputStream] stream
-        CCSVReadOptions c_read_options
-        CCSVParseOptions c_parse_options
-        CCSVConvertOptions c_convert_options
-        CIOContext io_context
-        shared_ptr[CCSVReader] reader
-        shared_ptr[CTable] table
-
-    _get_reader(input_file, read_options, &stream)
-    _get_read_options(read_options, &c_read_options)
-    _get_parse_options(parse_options, &c_parse_options)
-    _get_convert_options(convert_options, &c_convert_options)
-
-    with SignalStopHandler() as stop_handler:
-        io_context = CIOContext(
-            maybe_unbox_memory_pool(memory_pool),
-            (<StopToken> stop_handler.stop_token).stop_token)
-        reader = GetResultValue(CCSVReader.Make(
-            io_context, stream,
-            c_read_options, c_parse_options, c_convert_options))
-
-        with nogil:
-            table = GetResultValue(reader.get().Read())
-
-    return pyarrow_wrap_table(table)
-
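-# Illustrative sketch of reading with explicit options ("data.csv" and the
-# column names are hypothetical; assumes ConvertOptions accepts these
-# option names as keyword arguments):
-#
-#   import pyarrow.csv as csv
-#   convert_options = csv.ConvertOptions(include_columns=["id", "name"],
-#                                        strings_can_be_null=True)
-#   table = csv.read_csv("data.csv", convert_options=convert_options)
-#   print(table.num_rows, table.schema)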
-
-def open_csv(input_file, read_options=None, parse_options=None,
-             convert_options=None, MemoryPool memory_pool=None):
-    """
-    Open a streaming reader of CSV data.
-
-    Reading using this function is always single-threaded.
-
-    Parameters
-    ----------
-    input_file: string, path or file-like object
-        The location of CSV data.  If a string or path, and if it ends
-        with a recognized compressed file extension (e.g. ".gz" or ".bz2"),
-        the data is automatically decompressed when reading.
-    read_options: pyarrow.csv.ReadOptions, optional
-        Options for the CSV reader (see pyarrow.csv.ReadOptions constructor
-        for defaults)
-    parse_options: pyarrow.csv.ParseOptions, optional
-        Options for the CSV parser
-        (see pyarrow.csv.ParseOptions constructor for defaults)
-    convert_options: pyarrow.csv.ConvertOptions, optional
-        Options for converting CSV data
-        (see pyarrow.csv.ConvertOptions constructor for defaults)
-    memory_pool: MemoryPool, optional
-        Pool to allocate Table memory from
-
-    Returns
-    -------
-    :class:`pyarrow.csv.CSVStreamingReader`
-    """
-    cdef:
-        shared_ptr[CInputStream] stream
-        CCSVReadOptions c_read_options
-        CCSVParseOptions c_parse_options
-        CCSVConvertOptions c_convert_options
-        CSVStreamingReader reader
-
-    _get_reader(input_file, read_options, &stream)
-    _get_read_options(read_options, &c_read_options)
-    _get_parse_options(parse_options, &c_parse_options)
-    _get_convert_options(convert_options, &c_convert_options)
-
-    reader = CSVStreamingReader.__new__(CSVStreamingReader)
-    reader._open(stream, move(c_read_options), move(c_parse_options),
-                 move(c_convert_options), memory_pool)
-    return reader
-
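-# Illustrative streaming sketch ("big.csv" is hypothetical); the returned
-# reader is a RecordBatchReader, so it can be iterated batch by batch:
-#
-#   import pyarrow.csv as csv
-#   reader = csv.open_csv("big.csv")
-#   for batch in reader:          # one RecordBatch at a time
-#       ...                       # process incrementally
-#   # or materialize everything at once: table = reader.read_all()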
-
-cdef class WriteOptions(_Weakrefable):
-    """
-    Options for writing CSV files.
-
-    Parameters
-    ----------
-    include_header : bool, optional (default True)
-        Whether to write an initial header line with column names
-    batch_size : int, optional (default 1024)
-        How many rows to process together when converting and writing
-        CSV data
-    """
-    cdef:
-        CCSVWriteOptions options
-
-    # Avoid mistakenly creating attributes
-    __slots__ = ()
-
-    def __init__(self, *, include_header=None, batch_size=None):
-        self.options = CCSVWriteOptions.Defaults()
-        if include_header is not None:
-            self.include_header = include_header
-        if batch_size is not None:
-            self.batch_size = batch_size
-
-    @property
-    def include_header(self):
-        """
-        Whether to write an initial header line with column names.
-        """
-        return self.options.include_header
-
-    @include_header.setter
-    def include_header(self, value):
-        self.options.include_header = value
-
-    @property
-    def batch_size(self):
-        """
-        How many rows to process together when converting and writing
-        CSV data.
-        """
-        return self.options.batch_size
-
-    @batch_size.setter
-    def batch_size(self, value):
-        self.options.batch_size = value
-
-
-cdef _get_write_options(WriteOptions write_options, CCSVWriteOptions* out):
-    if write_options is None:
-        out[0] = CCSVWriteOptions.Defaults()
-    else:
-        out[0] = write_options.options
-
-
-def write_csv(data, output_file, write_options=None,
-              MemoryPool memory_pool=None):
-    """
-    Write record batch or table to a CSV file.
-
-    Parameters
-    ----------
-    data: pyarrow.RecordBatch or pyarrow.Table
-        The data to write.
-    output_file: string, path, pyarrow.OutputStream or file-like object
-        The location where to write the CSV data.
-    write_options: pyarrow.csv.WriteOptions
-        Options to configure writing the CSV data.
-    memory_pool: MemoryPool, optional
-        Pool for temporary allocations.
-    """
-    cdef:
-        shared_ptr[COutputStream] stream
-        CCSVWriteOptions c_write_options
-        CMemoryPool* c_memory_pool
-        CRecordBatch* batch
-        CTable* table
-    _get_write_options(write_options, &c_write_options)
-
-    get_writer(output_file, &stream)
-    c_memory_pool = maybe_unbox_memory_pool(memory_pool)
-    if isinstance(data, RecordBatch):
-        batch = pyarrow_unwrap_batch(data).get()
-        with nogil:
-            check_status(WriteCSV(deref(batch), c_write_options, c_memory_pool,
-                                  stream.get()))
-    elif isinstance(data, Table):
-        table = pyarrow_unwrap_table(data).get()
-        with nogil:
-            check_status(WriteCSV(deref(table), c_write_options, c_memory_pool,
-                                  stream.get()))
-    else:
-        raise TypeError(f"Expected Table or RecordBatch, got '{type(data)}'")
diff --git a/python/pyarrow/_cuda.pxd b/python/pyarrow/_cuda.pxd
deleted file mode 100644
index 6acb882..0000000
--- a/python/pyarrow/_cuda.pxd
+++ /dev/null
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: language_level = 3
-
-from pyarrow.lib cimport *
-from pyarrow.includes.common cimport *
-from pyarrow.includes.libarrow cimport *
-from pyarrow.includes.libarrow_cuda cimport *
-
-
-cdef class Context(_Weakrefable):
-    cdef:
-        shared_ptr[CCudaContext] context
-        int device_number
-
-    cdef void init(self, const shared_ptr[CCudaContext]& ctx)
-
-
-cdef class IpcMemHandle(_Weakrefable):
-    cdef:
-        shared_ptr[CCudaIpcMemHandle] handle
-
-    cdef void init(self, shared_ptr[CCudaIpcMemHandle]& h)
-
-
-cdef class CudaBuffer(Buffer):
-    cdef:
-        shared_ptr[CCudaBuffer] cuda_buffer
-        object base
-
-    cdef void init_cuda(self,
-                        const shared_ptr[CCudaBuffer]& buffer,
-                        object base)
-
-
-cdef class HostBuffer(Buffer):
-    cdef:
-        shared_ptr[CCudaHostBuffer] host_buffer
-
-    cdef void init_host(self, const shared_ptr[CCudaHostBuffer]& buffer)
-
-
-cdef class BufferReader(NativeFile):
-    cdef:
-        CCudaBufferReader* reader
-        CudaBuffer buffer
-
-
-cdef class BufferWriter(NativeFile):
-    cdef:
-        CCudaBufferWriter* writer
-        CudaBuffer buffer
diff --git a/python/pyarrow/_cuda.pyx b/python/pyarrow/_cuda.pyx
deleted file mode 100644
index f4ca763..0000000
--- a/python/pyarrow/_cuda.pyx
+++ /dev/null
@@ -1,1059 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-
-from pyarrow.lib import tobytes
-from pyarrow.lib cimport *
-from pyarrow.includes.libarrow_cuda cimport *
-from pyarrow.lib import py_buffer, allocate_buffer, as_buffer, ArrowTypeError
-from pyarrow.util import get_contiguous_span
-cimport cpython as cp
-
-
-cdef class Context(_Weakrefable):
-    """
-    CUDA driver context.
-    """
-
-    def __init__(self, *args, **kwargs):
-        """
-        Create a CUDA driver context for a particular device.
-
-        If a CUDA context handle is passed, it is wrapped, otherwise
-        a default CUDA context for the given device is requested.
-
-        Parameters
-        ----------
-        device_number : int (default 0)
-          Specify the GPU device for which the CUDA driver context is
-          requested.
-        handle : int, optional
-          Specify CUDA handle for a shared context that has been created
-          by another library.
-        """
-        # This method is exposed only because autodoc doesn't pick up
-        # __cinit__ docstrings
-
-    def __cinit__(self, int device_number=0, uintptr_t handle=0):
-        cdef CCudaDeviceManager* manager
-        manager = GetResultValue(CCudaDeviceManager.Instance())
-        cdef int n = manager.num_devices()
-        if device_number >= n or device_number < 0:
-            self.context.reset()
-            raise ValueError('device_number argument must be '
-                             'non-negative and less than %s' % (n,))
-        if handle == 0:
-            self.context = GetResultValue(manager.GetContext(device_number))
-        else:
-            self.context = GetResultValue(manager.GetSharedContext(
-                device_number, <void*>handle))
-        self.device_number = device_number
-
-    @staticmethod
-    def from_numba(context=None):
-        """
-        Create a Context instance from a Numba CUDA context.
-
-        Parameters
-        ----------
-        context : {numba.cuda.cudadrv.driver.Context, None}
-          A Numba CUDA context instance.
-          If None, the current Numba context is used.
-
-        Returns
-        -------
-        shared_context : pyarrow.cuda.Context
-          Context instance.
-        """
-        if context is None:
-            import numba.cuda
-            context = numba.cuda.current_context()
-        return Context(device_number=context.device.id,
-                       handle=context.handle.value)
-
-    def to_numba(self):
-        """
-        Convert Context to a Numba CUDA context.
-
-        Returns
-        -------
-        context : numba.cuda.cudadrv.driver.Context
-          Numba CUDA context instance.
-        """
-        import ctypes
-        import numba.cuda
-        device = numba.cuda.gpus[self.device_number]
-        handle = ctypes.c_void_p(self.handle)
-        context = numba.cuda.cudadrv.driver.Context(device, handle)
-
-        class DummyPendingDeallocs(object):
-            # Context is managed by pyarrow
-            def add_item(self, *args, **kwargs):
-                pass
-
-        context.deallocations = DummyPendingDeallocs()
-        return context
-
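-    # Illustrative interop sketch for the two methods above (assumes
-    # numba is installed and a CUDA device is available):
-    #
-    #   from pyarrow import cuda
-    #   ctx = cuda.Context.from_numba()   # wrap numba's current context
-    #   nb_ctx = ctx.to_numba()           # view it as a numba context
-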
-    @staticmethod
-    def get_num_devices():
-        """ Return the number of GPU devices.
-        """
-        cdef CCudaDeviceManager* manager
-        manager = GetResultValue(CCudaDeviceManager.Instance())
-        return manager.num_devices()
-
-    @property
-    def device_number(self):
-        """ Return context device number.
-        """
-        return self.device_number
-
-    @property
-    def handle(self):
-        """ Return pointer to context handle.
-        """
-        return <uintptr_t>self.context.get().handle()
-
-    cdef void init(self, const shared_ptr[CCudaContext]& ctx):
-        self.context = ctx
-
-    def synchronize(self):
-        """Blocks until the device has completed all preceding requested
-        tasks.
-        """
-        check_status(self.context.get().Synchronize())
-
-    @property
-    def bytes_allocated(self):
-        """Return the number of allocated bytes.
-        """
-        return self.context.get().bytes_allocated()
-
-    def get_device_address(self, uintptr_t address):
-        """Return the device address that is reachable from kernels running in
-        the context.
-
-        Parameters
-        ----------
-        address : int
-          Specify memory address value
-
-        Returns
-        -------
-        device_address : int
-          Device address accessible from device context
-
-        Notes
-        -----
-        A device address is a memory address accessible from the
-        device.  It is often a device memory address, but it can also
-        be a host memory address, for instance when the memory is
-        allocated as host memory (using cudaMallocHost or
-        cudaHostAlloc), as managed memory (using cudaMallocManaged),
-        or when the host memory is page-locked (using
-        cudaHostRegister).
-        """
-        return GetResultValue(self.context.get().GetDeviceAddress(address))
-
-    def new_buffer(self, int64_t nbytes):
-        """Return new device buffer.
-
-        Parameters
-        ----------
-        nbytes : int
-          Specify the number of bytes to be allocated.
-
-        Returns
-        -------
-        buf : CudaBuffer
-          Allocated buffer.
-        """
-        cdef:
-            shared_ptr[CCudaBuffer] cudabuf
-        with nogil:
-            cudabuf = GetResultValue(self.context.get().Allocate(nbytes))
-        return pyarrow_wrap_cudabuffer(cudabuf)
-
-    def foreign_buffer(self, address, size, base=None):
-        """Create device buffer from address and size as a view.
-
-        The caller is responsible for allocating and freeing the
-        memory.  When both `address` and `size` are 0, a new
-        zero-sized buffer is returned.
-
-        Parameters
-        ----------
-        address : int
-          Specify the starting address of the buffer.  The address may
-          refer to either device or host memory, but it must be
-          accessible from the device after mapping it with the
-          `get_device_address` method.
-        size : int
-          Specify the size of device buffer in bytes.
-        base : {None, object}
-          Specify object that owns the referenced memory.
-
-        Returns
-        -------
-        cbuf : CudaBuffer
-          Device buffer as a view of device reachable memory.
-
-        """
-        if not address and size == 0:
-            return self.new_buffer(0)
-        cdef:
-            uintptr_t c_addr = self.get_device_address(address)
-            int64_t c_size = size
-            shared_ptr[CCudaBuffer] cudabuf
-
-        cudabuf = GetResultValue(self.context.get().View(
-            <uint8_t*>c_addr, c_size))
-        return pyarrow_wrap_cudabuffer_base(cudabuf, base)
-
-    def open_ipc_buffer(self, ipc_handle):
-        """ Open existing CUDA IPC memory handle
-
-        Parameters
-        ----------
-        ipc_handle : IpcMemHandle
-          Specify opaque pointer to CUipcMemHandle (driver API).
-
-        Returns
-        -------
-        buf : CudaBuffer
-          referencing device buffer
-        """
-        handle = pyarrow_unwrap_cudaipcmemhandle(ipc_handle)
-        cdef shared_ptr[CCudaBuffer] cudabuf
-        with nogil:
-            cudabuf = GetResultValue(
-                self.context.get().OpenIpcBuffer(handle.get()[0]))
-        return pyarrow_wrap_cudabuffer(cudabuf)
-
-    def buffer_from_data(self, object data, int64_t offset=0, int64_t size=-1):
-        """Create device buffer and initialize with data.
-
-        Parameters
-        ----------
-        data : {CudaBuffer, HostBuffer, Buffer, array-like}
-          Specify data to be copied to device buffer.
-        offset : int
-          Specify the offset of input buffer for device data
-          buffering. Default: 0.
-        size : int
-          Specify the size of device buffer in bytes. Default: all
-          (starting from input offset)
-
-        Returns
-        -------
-        cbuf : CudaBuffer
-          Device buffer with copied data.
-        """
-        is_host_data = not pyarrow_is_cudabuffer(data)
-        buf = as_buffer(data) if is_host_data else data
-
-        bsize = buf.size
-        if offset < 0 or (bsize and offset >= bsize):
-            raise ValueError('offset argument is out-of-range')
-        if size < 0:
-            size = bsize - offset
-        elif offset + size > bsize:
-            raise ValueError(
-                'requested larger slice than available in device buffer')
-
-        if offset != 0 or size != bsize:
-            buf = buf.slice(offset, size)
-
-        result = self.new_buffer(size)
-        if is_host_data:
-            result.copy_from_host(buf, position=0, nbytes=size)
-        else:
-            result.copy_from_device(buf, position=0, nbytes=size)
-        return result
-
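-    # Illustrative sketch (assumes a CUDA-enabled build and a GPU):
-    #
-    #   from pyarrow import cuda
-    #   ctx = cuda.Context(0)
-    #   cbuf = ctx.buffer_from_data(b"hello world", offset=6, size=5)
-    #   assert cbuf.copy_to_host().to_pybytes() == b"world"
-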
-    def buffer_from_object(self, obj):
-        """Create device buffer view of arbitrary object that references
-        device accessible memory.
-
-        If the object contains a non-contiguous view of device
-        accessible memory, the returned device buffer will be a
-        contiguous view of that memory, that is, it will also include
-        the intermediate data that is otherwise invisible to the input
-        object.
-
-        Parameters
-        ----------
-        obj : {object, Buffer, HostBuffer, CudaBuffer, ...}
-          Specify an object that holds (device or host) address that
-          can be accessed from device. This includes objects with
-          types defined in pyarrow.cuda as well as arbitrary objects
-          that implement the CUDA array interface as defined by numba.
-
-        Returns
-        -------
-        cbuf : CudaBuffer
-          Device buffer as a view of device accessible memory.
-
-        """
-        if isinstance(obj, HostBuffer):
-            return self.foreign_buffer(obj.address, obj.size, base=obj)
-        elif isinstance(obj, Buffer):
-            return CudaBuffer.from_buffer(obj)
-        elif isinstance(obj, CudaBuffer):
-            return obj
-        elif hasattr(obj, '__cuda_array_interface__'):
-            desc = obj.__cuda_array_interface__
-            addr = desc['data'][0]
-            if addr is None:
-                return self.new_buffer(0)
-            import numpy as np
-            start, end = get_contiguous_span(
-                desc['shape'], desc.get('strides'),
-                np.dtype(desc['typestr']).itemsize)
-            return self.foreign_buffer(addr + start, end - start, base=obj)
-        raise ArrowTypeError('cannot create device buffer view from'
-                             ' `%s` object' % (type(obj)))
-
-
-cdef class IpcMemHandle(_Weakrefable):
-    """A serializable container for a CUDA IPC handle.
-    """
-    cdef void init(self, shared_ptr[CCudaIpcMemHandle]& h):
-        self.handle = h
-
-    @staticmethod
-    def from_buffer(Buffer opaque_handle):
-        """Create IpcMemHandle from opaque buffer (e.g. from another
-        process)
-
-        Parameters
-        ----------
-        opaque_handle : Buffer
-          A buffer wrapping a CUipcMemHandle (const void*), e.g. as
-          received from another process.
-
-        Returns
-        -------
-        ipc_handle : IpcMemHandle
-        """
-        c_buf = pyarrow_unwrap_buffer(opaque_handle)
-        cdef:
-            shared_ptr[CCudaIpcMemHandle] handle
-
-        handle = GetResultValue(
-            CCudaIpcMemHandle.FromBuffer(c_buf.get().data()))
-        return pyarrow_wrap_cudaipcmemhandle(handle)
-
-    def serialize(self, pool=None):
-        """Write IpcMemHandle to a Buffer
-
-        Parameters
-        ----------
-        pool : {MemoryPool, None}
-          Specify a pool to allocate memory from
-
-        Returns
-        -------
-        buf : Buffer
-          The serialized buffer.
-        """
-        cdef CMemoryPool* pool_ = maybe_unbox_memory_pool(pool)
-        cdef shared_ptr[CBuffer] buf
-        cdef CCudaIpcMemHandle* h = self.handle.get()
-        with nogil:
-            buf = GetResultValue(h.Serialize(pool_))
-        return pyarrow_wrap_buffer(buf)
-
-
-cdef class CudaBuffer(Buffer):
-    """An Arrow buffer with data located in a GPU device.
-
-    To create a CudaBuffer instance, use Context.new_buffer() or
-    Context.buffer_from_data().
-
-    The memory allocated in a CudaBuffer is freed when the buffer object
-    is deleted.
-    """
-
-    def __init__(self):
-        raise TypeError("Do not call CudaBuffer's constructor directly, use "
-                        "`<pyarrow.Context instance>.device_buffer`"
-                        " method instead.")
-
-    cdef void init_cuda(self,
-                        const shared_ptr[CCudaBuffer]& buffer,
-                        object base):
-        self.cuda_buffer = buffer
-        self.init(<shared_ptr[CBuffer]> buffer)
-        self.base = base
-
-    @staticmethod
-    def from_buffer(buf):
-        """ Convert back generic buffer into CudaBuffer
-
-        Parameters
-        ----------
-        buf : Buffer
-          Specify buffer containing CudaBuffer
-
-        Returns
-        -------
-        dbuf : CudaBuffer
-          Resulting device buffer.
-        """
-        c_buf = pyarrow_unwrap_buffer(buf)
-        cuda_buffer = GetResultValue(CCudaBuffer.FromBuffer(c_buf))
-        return pyarrow_wrap_cudabuffer(cuda_buffer)
-
-    @staticmethod
-    def from_numba(mem):
-        """Create a CudaBuffer view from numba MemoryPointer instance.
-
-        Parameters
-        ----------
-        mem :  numba.cuda.cudadrv.driver.MemoryPointer
-
-        Returns
-        -------
-        cbuf : CudaBuffer
-          Device buffer as a view of numba MemoryPointer.
-        """
-        ctx = Context.from_numba(mem.context)
-        if mem.device_pointer.value is None and mem.size == 0:
-            return ctx.new_buffer(0)
-        return ctx.foreign_buffer(mem.device_pointer.value, mem.size, base=mem)
-
-    def to_numba(self):
-        """Return numba memory pointer of CudaBuffer instance.
-        """
-        import ctypes
-        from numba.cuda.cudadrv.driver import MemoryPointer
-        return MemoryPointer(self.context.to_numba(),
-                             pointer=ctypes.c_void_p(self.address),
-                             size=self.size)
-
-    cdef getitem(self, int64_t i):
-        return self.copy_to_host(position=i, nbytes=1)[0]
-
-    def copy_to_host(self, int64_t position=0, int64_t nbytes=-1,
-                     Buffer buf=None,
-                     MemoryPool memory_pool=None, c_bool resizable=False):
-        """Copy memory from GPU device to CPU host
-
-        Caller is responsible for ensuring that all tasks affecting
-        the memory are finished. Use
-
-          `<CudaBuffer instance>.context.synchronize()`
-
-        when needed.
-
-        Parameters
-        ----------
-        position : int
-          Specify the starting position of the source data in GPU
-          device buffer. Default: 0.
-        nbytes : int
-          Specify the number of bytes to copy. Default: -1 (all from
-          the position until host buffer is full).
-        buf : Buffer
-          Specify a pre-allocated output buffer in host. Default: None
-          (allocate new output buffer).
-        memory_pool : MemoryPool
-        resizable : bool
-          Specify extra arguments to allocate_buffer. Used only when
-          buf is None.
-
-        Returns
-        -------
-        buf : Buffer
-          Output buffer in host.
-
-        """
-        if position < 0 or (self.size and position > self.size) \
-           or (self.size == 0 and position != 0):
-            raise ValueError('position argument is out-of-range')
-        cdef:
-            int64_t c_nbytes
-        if buf is None:
-            if nbytes < 0:
-                # copy all starting from position to new host buffer
-                c_nbytes = self.size - position
-            else:
-                if nbytes > self.size - position:
-                    raise ValueError(
-                        'requested more to copy than available from '
-                        'device buffer')
-                # copy nbytes starting from position to new host buffer
-                c_nbytes = nbytes
-            buf = allocate_buffer(c_nbytes, memory_pool=memory_pool,
-                                  resizable=resizable)
-        else:
-            if nbytes < 0:
-                # copy all from position until given host buffer is full
-                c_nbytes = min(self.size - position, buf.size)
-            else:
-                if nbytes > buf.size:
-                    raise ValueError(
-                        'requested copy does not fit into host buffer')
-                # copy nbytes from position to given host buffer
-                c_nbytes = nbytes
-
-        cdef:
-            shared_ptr[CBuffer] c_buf = pyarrow_unwrap_buffer(buf)
-            int64_t c_position = position
-        with nogil:
-            check_status(self.cuda_buffer.get()
-                         .CopyToHost(c_position, c_nbytes,
-                                     c_buf.get().mutable_data()))
-        return buf
-
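-    # Illustrative host<->device copy sketch (`ctx` is a cuda.Context;
-    # assumes a GPU):
-    #
-    #   cbuf = ctx.new_buffer(5)
-    #   cbuf.copy_from_host(b"abcde")   # host -> device
-    #   assert cbuf.copy_to_host().to_pybytes() == b"abcde"
-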
-    def copy_from_host(self, data, int64_t position=0, int64_t nbytes=-1):
-        """Copy data from host to device.
-
-        The device buffer must be pre-allocated.
-
-        Parameters
-        ----------
-        data : {Buffer, array-like}
-          Specify data in host. It can be array-like that is valid
-          argument to py_buffer
-        position : int
-          Specify the starting position of the copy in device buffer.
-          Default: 0.
-        nbytes : int
-          Specify the number of bytes to copy. Default: -1 (all from
-          source until device buffer, starting from position, is full)
-
-        Returns
-        -------
-        nbytes : int
-          Number of bytes copied.
-        """
-        if position < 0 or position > self.size:
-            raise ValueError('position argument is out-of-range')
-        cdef:
-            int64_t c_nbytes
-        buf = as_buffer(data)
-
-        if nbytes < 0:
-            # copy from host buffer to device buffer starting from
-            # position until device buffer is full
-            c_nbytes = min(self.size - position, buf.size)
-        else:
-            if nbytes > buf.size:
-                raise ValueError(
-                    'requested more to copy than available from host buffer')
-            if nbytes > self.size - position:
-                raise ValueError(
-                    'requested more to copy than available in device buffer')
-            # copy nbytes from host buffer to device buffer starting
-            # from position
-            c_nbytes = nbytes
-
-        cdef:
-            shared_ptr[CBuffer] c_buf = pyarrow_unwrap_buffer(buf)
-            int64_t c_position = position
-        with nogil:
-            check_status(self.cuda_buffer.get().
-                         CopyFromHost(c_position, c_buf.get().data(),
-                                      c_nbytes))
-        return c_nbytes
-
-    def copy_from_device(self, buf, int64_t position=0, int64_t nbytes=-1):
-        """Copy data from device to device.
-
-        Parameters
-        ----------
-        buf : CudaBuffer
-          Specify source device buffer.
-        position : int
-          Specify the starting position of the copy in device buffer.
-          Default: 0.
-        nbytes : int
-          Specify the number of bytes to copy. Default: -1 (all from
-          source until device buffer, starting from position, is full)
-
-        Returns
-        -------
-        nbytes : int
-          Number of bytes copied.
-
-        """
-        if position < 0 or position > self.size:
-            raise ValueError('position argument is out-of-range')
-        cdef:
-            int64_t c_nbytes
-
-        if nbytes < 0:
-            # copy from source device buffer to device buffer starting
-            # from position until device buffer is full
-            c_nbytes = min(self.size - position, buf.size)
-        else:
-            if nbytes > buf.size:
-                raise ValueError(
-                    'requested more to copy than available from device buffer')
-            if nbytes > self.size - position:
-                raise ValueError(
-                    'requested more to copy than available in device buffer')
-            # copy nbytes from source device buffer to device buffer
-            # starting from position
-            c_nbytes = nbytes
-
-        cdef:
-            shared_ptr[CCudaBuffer] c_buf = pyarrow_unwrap_cudabuffer(buf)
-            int64_t c_position = position
-            shared_ptr[CCudaContext] c_src_ctx = pyarrow_unwrap_cudacontext(
-                buf.context)
-            void* c_source_data = <void*>(c_buf.get().address())
-
-        if self.context.handle != buf.context.handle:
-            with nogil:
-                check_status(self.cuda_buffer.get().
-                             CopyFromAnotherDevice(c_src_ctx, c_position,
-                                                   c_source_data, c_nbytes))
-        else:
-            with nogil:
-                check_status(self.cuda_buffer.get().
-                             CopyFromDevice(c_position, c_source_data,
-                                            c_nbytes))
-        return c_nbytes
-
-    def export_for_ipc(self):
-        """
-        Expose this device buffer as IPC memory which can be used in other
-        processes.
-
-        After calling this function, this device memory will not be
-        freed when the CudaBuffer is destructed.
-
-        Returns
-        -------
-        ipc_handle : IpcMemHandle
-          The exported IPC handle
-
-        """
-        cdef shared_ptr[CCudaIpcMemHandle] handle
-        with nogil:
-            handle = GetResultValue(self.cuda_buffer.get().ExportForIpc())
-        return pyarrow_wrap_cudaipcmemhandle(handle)
-
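-    # Illustrative IPC sketch for the method above (the two halves run
-    # in different processes; `ctx` is a cuda.Context on each side):
-    #
-    #   # producer
-    #   handle = cbuf.export_for_ipc()
-    #   payload = handle.serialize().to_pybytes()
-    #   # consumer, after receiving `payload`
-    #   import pyarrow as pa
-    #   from pyarrow import cuda
-    #   h = cuda.IpcMemHandle.from_buffer(pa.py_buffer(payload))
-    #   shared = ctx.open_ipc_buffer(h)
-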
-    @property
-    def context(self):
-        """Returns the CUDA driver context of this buffer.
-        """
-        return pyarrow_wrap_cudacontext(self.cuda_buffer.get().context())
-
-    def slice(self, offset=0, length=None):
-        """Return slice of device buffer
-
-        Parameters
-        ----------
-        offset : int, default 0
-          Specify offset from the start of device buffer to slice
-        length : int, default None
-          Specify the length of slice (default is until end of device
-          buffer starting from offset). If the length is larger than
-          the data available, the returned slice will have a size of
-          the available data starting from the offset.
-
-        Returns
-        -------
-        sliced : CudaBuffer
-          Zero-copy slice of device buffer.
-
-        """
-        if offset < 0 or (self.size and offset >= self.size):
-            raise ValueError('offset argument is out-of-range')
-        cdef int64_t offset_ = offset
-        cdef int64_t size
-        if length is None:
-            size = self.size - offset_
-        elif offset + length <= self.size:
-            size = length
-        else:
-            size = self.size - offset
-        parent = pyarrow_unwrap_cudabuffer(self)
-        return pyarrow_wrap_cudabuffer(make_shared[CCudaBuffer](parent,
-                                                                offset_, size))
-
-    def to_pybytes(self):
-        """Return device buffer content as Python bytes.
-        """
-        return self.copy_to_host().to_pybytes()
-
-    def __getbuffer__(self, cp.Py_buffer* buffer, int flags):
-        # A device buffer's data lives in device memory, so the
-        # PEP 3118 buffer protocol cannot be supported for CudaBuffer.
-        raise BufferError('buffer protocol for device buffer not supported')
-
-
-cdef class HostBuffer(Buffer):
-    """Device-accessible CPU memory created using cudaHostAlloc.
-
-    To create a HostBuffer instance, use
-
-      cuda.new_host_buffer(<nbytes>)
-    """
-
-    def __init__(self):
-        raise TypeError("Do not call HostBuffer's constructor directly,"
-                        " use `cuda.new_host_buffer` function instead.")
-
-    cdef void init_host(self, const shared_ptr[CCudaHostBuffer]& buffer):
-        self.host_buffer = buffer
-        self.init(<shared_ptr[CBuffer]> buffer)
-
-    @property
-    def size(self):
-        return self.host_buffer.get().size()
-
-
-cdef class BufferReader(NativeFile):
-    """File interface for zero-copy read from CUDA buffers.
-
-    Note: Read methods return pointers to device memory. This means
-    you must be careful using this interface with any Arrow code which
-    may expect to be able to do anything other than pointer arithmetic
-    on the returned buffers.
-    """
-
-    def __cinit__(self, CudaBuffer obj):
-        self.buffer = obj
-        self.reader = new CCudaBufferReader(self.buffer.buffer)
-        self.set_random_access_file(
-            shared_ptr[CRandomAccessFile](self.reader))
-        self.is_readable = True
-
-    def read_buffer(self, nbytes=None):
-        """Return a slice view of the underlying device buffer.
-
-        The slice will start at the current reader position and will
-        have the specified size in bytes.
-
-        Parameters
-        ----------
-        nbytes : int, default None
-          Specify the number of bytes to read. Default: None (read all
-          remaining bytes).
-
-        Returns
-        -------
-        cbuf : CudaBuffer
-          New device buffer.
-
-        """
-        cdef:
-            int64_t c_nbytes
-            int64_t bytes_read = 0
-            shared_ptr[CCudaBuffer] output
-
-        if nbytes is None:
-            c_nbytes = self.size() - self.tell()
-        else:
-            c_nbytes = nbytes
-
-        with nogil:
-            output = static_pointer_cast[CCudaBuffer, CBuffer](
-                GetResultValue(self.reader.Read(c_nbytes)))
-
-        return pyarrow_wrap_cudabuffer(output)
-
-
-cdef class BufferWriter(NativeFile):
-    """File interface for writing to CUDA buffers.
-
-    By default writes are unbuffered. Use set_buffer_size to enable
-    buffering.
-    """
-
-    def __cinit__(self, CudaBuffer buffer):
-        self.buffer = buffer
-        self.writer = new CCudaBufferWriter(self.buffer.cuda_buffer)
-        self.set_output_stream(shared_ptr[COutputStream](self.writer))
-        self.is_writable = True
-
-    def writeat(self, int64_t position, object data):
-        """Write data to buffer starting from position.
-
-        Parameters
-        ----------
-        position : int
-          Specify device buffer position where the data will be
-          written.
-        data : array-like
-          Specify the data; the data instance must implement the
-          buffer protocol.
-        """
-        cdef:
-            Buffer buf = as_buffer(data)
-            const uint8_t* c_data = buf.buffer.get().data()
-            int64_t c_size = buf.buffer.get().size()
-
-        with nogil:
-            check_status(self.writer.WriteAt(position, c_data, c_size))
-
-    def flush(self):
-        """ Flush the buffer stream """
-        with nogil:
-            check_status(self.writer.Flush())
-
-    def seek(self, int64_t position, int whence=0):
-        # TODO: remove this method after NativeFile.seek supports
-        # writable files.
-        cdef int64_t offset
-
-        with nogil:
-            if whence == 0:
-                offset = position
-            elif whence == 1:
-                offset = GetResultValue(self.writer.Tell())
-                offset = offset + position
-            else:
-                with gil:
-                    raise ValueError("Invalid value of whence: {0}"
-                                     .format(whence))
-            check_status(self.writer.Seek(offset))
-        return self.tell()
-
-    @property
-    def buffer_size(self):
-        """Returns size of host (CPU) buffer, 0 for unbuffered
-        """
-        return self.writer.buffer_size()
-
-    @buffer_size.setter
-    def buffer_size(self, int64_t buffer_size):
-        """Set CPU buffer size to limit calls to cudaMemcpy
-
-        Parameters
-        ----------
-        buffer_size : int
-          Specify the size of CPU buffer to allocate in bytes.
-        """
-        with nogil:
-            check_status(self.writer.SetBufferSize(buffer_size))
-
-    @property
-    def num_bytes_buffered(self):
-        """Returns number of bytes buffered on host
-        """
-        return self.writer.num_bytes_buffered()
-
-# Functions
-
-
-def new_host_buffer(const int64_t size, int device=0):
-    """Return buffer with CUDA-accessible memory on CPU host
-
-    Parameters
-    ----------
-    size : int
-      Specify the number of bytes to be allocated.
-    device : int
-      Specify GPU device number.
-
-    Returns
-    -------
-    dbuf : HostBuffer
-      Allocated host buffer
-    """
-    cdef shared_ptr[CCudaHostBuffer] buffer
-    with nogil:
-        buffer = GetResultValue(AllocateCudaHostBuffer(device, size))
-    return pyarrow_wrap_cudahostbuffer(buffer)
-
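-# Illustrative sketch (assumes a CUDA-enabled build; HostBuffer inherits
-# the host-side buffer protocol from Buffer, so it is assumed to be
-# addressable from the CPU as well):
-#
-#   from pyarrow import cuda
-#   hbuf = cuda.new_host_buffer(64)    # pinned, device-accessible memory
-#   memoryview(hbuf)[:5] = b"abcde"    # writable host-side view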
-
-def serialize_record_batch(object batch, object ctx):
-    """ Write record batch message to GPU device memory
-
-    Parameters
-    ----------
-    batch : RecordBatch
-      Record batch to write
-    ctx : Context
-      CUDA Context to allocate device memory from
-
-    Returns
-    -------
-    dbuf : CudaBuffer
-      device buffer which contains the record batch message
-    """
-    cdef shared_ptr[CCudaBuffer] buffer
-    cdef CRecordBatch* batch_ = pyarrow_unwrap_batch(batch).get()
-    cdef CCudaContext* ctx_ = pyarrow_unwrap_cudacontext(ctx).get()
-    with nogil:
-        buffer = GetResultValue(CudaSerializeRecordBatch(batch_[0], ctx_))
-    return pyarrow_wrap_cudabuffer(buffer)
-
-
-def read_message(object source, pool=None):
-    """ Read Arrow IPC message located on GPU device
-
-    Parameters
-    ----------
-    source : {CudaBuffer, cuda.BufferReader}
-      Device buffer or reader of device buffer.
-    pool : MemoryPool (optional)
-      Pool to allocate CPU memory for the metadata
-
-    Returns
-    -------
-    message : Message
-      The deserialized message, body still on device
-    """
-    cdef:
-        Message result = Message.__new__(Message)
-        BufferReader reader
-    cdef CMemoryPool* pool_ = maybe_unbox_memory_pool(pool)
-    # Wrap the source in a BufferReader if needed, so that `reader` is
-    # always bound when used below.
-    if isinstance(source, BufferReader):
-        reader = source
-    else:
-        reader = BufferReader(source)
-    with nogil:
-        result.message = move(
-            GetResultValue(ReadMessage(reader.reader, pool_)))
-    return result
-
-
-def read_record_batch(object buffer, object schema, *,
-                      DictionaryMemo dictionary_memo=None, pool=None):
-    """Construct RecordBatch referencing IPC message located on CUDA device.
-
-    While the metadata is copied to host memory for deserialization,
-    the record batch data remains on the device.
-
-    Parameters
-    ----------
-    buffer : CudaBuffer
-      Device buffer containing the complete IPC message
-    schema : Schema
-      The schema for the record batch
-    dictionary_memo : DictionaryMemo, optional
-      If the message contains dictionaries, a populated
-      DictionaryMemo must be passed.
-    pool : MemoryPool (optional)
-      Pool to allocate metadata from
-
-    Returns
-    -------
-    batch : RecordBatch
-      Reconstructed record batch, with device pointers
-
-    """
-    cdef:
-        shared_ptr[CSchema] schema_ = pyarrow_unwrap_schema(schema)
-        shared_ptr[CCudaBuffer] buffer_ = pyarrow_unwrap_cudabuffer(buffer)
-        CDictionaryMemo temp_memo
-        CDictionaryMemo* arg_dict_memo
-        CMemoryPool* pool_ = maybe_unbox_memory_pool(pool)
-        shared_ptr[CRecordBatch] batch
-
-    if dictionary_memo is not None:
-        arg_dict_memo = dictionary_memo.memo
-    else:
-        arg_dict_memo = &temp_memo
-
-    with nogil:
-        batch = GetResultValue(CudaReadRecordBatch(
-            schema_, arg_dict_memo, buffer_, pool_))
-    return pyarrow_wrap_batch(batch)
-
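-# Illustrative device round-trip sketch tying the functions above together
-# (assumes a CUDA-enabled build and a GPU):
-#
-#   import pyarrow as pa
-#   from pyarrow import cuda
-#   ctx = cuda.Context(0)
-#   batch = pa.RecordBatch.from_pydict({"x": [1, 2, 3]})
-#   dbuf = cuda.serialize_record_batch(batch, ctx)
-#   out = cuda.read_record_batch(dbuf, batch.schema)  # data stays on device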
-
-# Public API
-
-
-cdef public api bint pyarrow_is_buffer(object buffer):
-    return isinstance(buffer, Buffer)
-
-# cudabuffer
-
-cdef public api bint pyarrow_is_cudabuffer(object buffer):
-    return isinstance(buffer, CudaBuffer)
-
-
-cdef public api object \
-        pyarrow_wrap_cudabuffer_base(const shared_ptr[CCudaBuffer]& buf, base):
-    cdef CudaBuffer result = CudaBuffer.__new__(CudaBuffer)
-    result.init_cuda(buf, base)
-    return result
-
-
-cdef public api object \
-        pyarrow_wrap_cudabuffer(const shared_ptr[CCudaBuffer]& buf):
-    cdef CudaBuffer result = CudaBuffer.__new__(CudaBuffer)
-    result.init_cuda(buf, None)
-    return result
-
-
-cdef public api shared_ptr[CCudaBuffer] pyarrow_unwrap_cudabuffer(object obj):
-    if pyarrow_is_cudabuffer(obj):
-        return (<CudaBuffer>obj).cuda_buffer
-    raise TypeError('expected CudaBuffer instance, got %s'
-                    % (type(obj).__name__))
-
-# cudahostbuffer
-
-cdef public api bint pyarrow_is_cudahostbuffer(object buffer):
-    return isinstance(buffer, HostBuffer)
-
-
-cdef public api object \
-        pyarrow_wrap_cudahostbuffer(const shared_ptr[CCudaHostBuffer]& buf):
-    cdef HostBuffer result = HostBuffer.__new__(HostBuffer)
-    result.init_host(buf)
-    return result
-
-
-cdef public api shared_ptr[CCudaHostBuffer] \
-        pyarrow_unwrap_cudahostbuffer(object obj):
-    if pyarrow_is_cudahostbuffer(obj):
-        return (<HostBuffer>obj).host_buffer
-    raise TypeError('expected HostBuffer instance, got %s'
-                    % (type(obj).__name__))
-
-# cudacontext
-
-cdef public api bint pyarrow_is_cudacontext(object ctx):
-    return isinstance(ctx, Context)
-
-
-cdef public api object \
-        pyarrow_wrap_cudacontext(const shared_ptr[CCudaContext]& ctx):
-    cdef Context result = Context.__new__(Context)
-    result.init(ctx)
-    return result
-
-
-cdef public api shared_ptr[CCudaContext] \
-        pyarrow_unwrap_cudacontext(object obj):
-    if pyarrow_is_cudacontext(obj):
-        return (<Context>obj).context
-    raise TypeError('expected Context instance, got %s'
-                    % (type(obj).__name__))
-
-# cudaipcmemhandle
-
-cdef public api bint pyarrow_is_cudaipcmemhandle(object handle):
-    return isinstance(handle, IpcMemHandle)
-
-
-cdef public api object \
-        pyarrow_wrap_cudaipcmemhandle(shared_ptr[CCudaIpcMemHandle]& h):
-    cdef IpcMemHandle result = IpcMemHandle.__new__(IpcMemHandle)
-    result.init(h)
-    return result
-
-
-cdef public api shared_ptr[CCudaIpcMemHandle] \
-        pyarrow_unwrap_cudaipcmemhandle(object obj):
-    if pyarrow_is_cudaipcmemhandle(obj):
-        return (<IpcMemHandle>obj).handle
-    raise TypeError('expected IpcMemHandle instance, got %s'
-                    % (type(obj).__name__))
diff --git a/python/pyarrow/_dataset.pyx b/python/pyarrow/_dataset.pyx
deleted file mode 100644
index 6199428..0000000
--- a/python/pyarrow/_dataset.pyx
+++ /dev/null
@@ -1,2977 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# cython: language_level = 3
-
-"""Dataset is currently unstable. APIs subject to change without notice."""
-
-from cpython.object cimport Py_LT, Py_EQ, Py_GT, Py_LE, Py_NE, Py_GE
-from cython.operator cimport dereference as deref
-
-import collections
-import os
-import warnings
-
-import pyarrow as pa
-from pyarrow.lib cimport *
-from pyarrow.lib import ArrowTypeError, frombytes, tobytes
-from pyarrow.includes.libarrow_dataset cimport *
-from pyarrow._fs cimport FileSystem, FileInfo, FileSelector
-from pyarrow._csv cimport ConvertOptions, ParseOptions, ReadOptions
-from pyarrow.util import _is_iterable, _is_path_like, _stringify_path
-
-from pyarrow._parquet cimport (
-    _create_writer_properties, _create_arrow_writer_properties,
-    FileMetaData, RowGroupMetaData, ColumnChunkMetaData
-)
-
-
-def _forbid_instantiation(klass, subclasses_instead=True):
-    msg = '{} is an abstract class and cannot be instantiated.'.format(
-        klass.__name__
-    )
-    if subclasses_instead:
-        subclasses = [cls.__name__ for cls in klass.__subclasses__()]
-        msg += ' Use one of the subclasses instead: {}'.format(
-            ', '.join(subclasses)
-        )
-    raise TypeError(msg)
-
-
-cdef CFileSource _make_file_source(object file, FileSystem filesystem=None):
-
-    cdef:
-        CFileSource c_source
-        shared_ptr[CFileSystem] c_filesystem
-        c_string c_path
-        shared_ptr[CRandomAccessFile] c_file
-        shared_ptr[CBuffer] c_buffer
-
-    if isinstance(file, Buffer):
-        c_buffer = pyarrow_unwrap_buffer(file)
-        c_source = CFileSource(move(c_buffer))
-
-    elif _is_path_like(file):
-        if filesystem is None:
-            raise ValueError("cannot construct a FileSource from "
-                             "a path without a FileSystem")
-        c_filesystem = filesystem.unwrap()
-        c_path = tobytes(_stringify_path(file))
-        c_source = CFileSource(move(c_path), move(c_filesystem))
-
-    elif hasattr(file, 'read'):
-        # Optimistically hope this is file-like
-        c_file = get_native_file(file, False).get_random_access_file()
-        c_source = CFileSource(move(c_file))
-
-    else:
-        raise TypeError("cannot construct a FileSource "
-                        "from " + str(file))
-
-    return c_source
-
-
-cdef class Expression(_Weakrefable):
-    """
-    A logical expression to be evaluated against some input.
-
-    To create an expression:
-
-    - Use the factory function ``pyarrow.dataset.scalar()`` to create a
-      scalar (not necessary when combined, see example below).
-    - Use the factory function ``pyarrow.dataset.field()`` to reference
-      a field (column in table).
-    - Compare fields and scalars with ``<``, ``<=``, ``==``, ``>=``, ``>``.
-    - Combine expressions using python operators ``&`` (logical and),
-      ``|`` (logical or) and ``~`` (logical not).
-      Note: python keywords ``and``, ``or`` and ``not`` cannot be used
-      to combine expressions.
-    - Check whether the expression is contained in a list of values with
-      the ``pyarrow.dataset.Expression.isin()`` member function.
-
-    Examples
-    --------
-
-    >>> import pyarrow.dataset as ds
-    >>> (ds.field("a") < ds.scalar(3)) | (ds.field("b") > 7)
-    <pyarrow.dataset.Expression ((a < 3:int64) or (b > 7:int64))>
-    >>> ds.field('a') != 3
-    <pyarrow.dataset.Expression (a != 3)>
-    >>> ds.field('a').isin([1, 2, 3])
-    <pyarrow.dataset.Expression (a is in [
-      1,
-      2,
-      3
-    ])>
-    """
-    cdef:
-        CExpression expr
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef void init(self, const CExpression& sp):
-        self.expr = sp
-
-    @staticmethod
-    cdef wrap(const CExpression& sp):
-        cdef Expression self = Expression.__new__(Expression)
-        self.init(sp)
-        return self
-
-    cdef inline CExpression unwrap(self):
-        return self.expr
-
-    def equals(self, Expression other):
-        return self.expr.Equals(other.unwrap())
-
-    def __str__(self):
-        return frombytes(self.expr.ToString())
-
-    def __repr__(self):
-        return "<pyarrow.dataset.{0} {1}>".format(
-            self.__class__.__name__, str(self)
-        )
-
-    @staticmethod
-    def _deserialize(Buffer buffer not None):
-        return Expression.wrap(GetResultValue(CDeserializeExpression(
-            pyarrow_unwrap_buffer(buffer))))
-
-    def __reduce__(self):
-        buffer = pyarrow_wrap_buffer(GetResultValue(
-            CSerializeExpression(self.expr)))
-        return Expression._deserialize, (buffer,)
-
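-    # The __reduce__/_deserialize pair above makes expressions
-    # picklable; an illustrative sketch:
-    #
-    #   import pickle
-    #   import pyarrow.dataset as ds
-    #   expr = ds.field("a") > 1
-    #   assert pickle.loads(pickle.dumps(expr)).equals(expr)
-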
-    @staticmethod
-    cdef Expression _expr_or_scalar(object expr):
-        if isinstance(expr, Expression):
-            return (<Expression> expr)
-        return (<Expression> Expression._scalar(expr))
-
-    @staticmethod
-    cdef Expression _call(str function_name, list arguments,
-                          shared_ptr[CFunctionOptions] options=(
-                              <shared_ptr[CFunctionOptions]> nullptr)):
-        cdef:
-            vector[CExpression] c_arguments
-
-        for argument in arguments:
-            c_arguments.push_back((<Expression> argument).expr)
-
-        return Expression.wrap(CMakeCallExpression(tobytes(function_name),
-                                                   move(c_arguments), options))
-
-    def __richcmp__(self, other, int op):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call({
-            Py_EQ: "equal",
-            Py_NE: "not_equal",
-            Py_GT: "greater",
-            Py_GE: "greater_equal",
-            Py_LT: "less",
-            Py_LE: "less_equal",
-        }[op], [self, other])
-
-    def __bool__(self):
-        raise ValueError(
-            "An Expression cannot be evaluated to python True or False. "
-            "If you are using the 'and', 'or' or 'not' operators, use '&', "
-            "'|' or '~' instead."
-        )
-
-    def __invert__(self):
-        return Expression._call("invert", [self])
-
-    def __and__(Expression self, other):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call("and_kleene", [self, other])
-
-    def __or__(Expression self, other):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call("or_kleene", [self, other])
-
-    def __add__(Expression self, other):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call("add_checked", [self, other])
-
-    def __mul__(Expression self, other):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call("multiply_checked", [self, other])
-
-    def __sub__(Expression self, other):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call("subtract_checked", [self, other])
-
-    def __truediv__(Expression self, other):
-        other = Expression._expr_or_scalar(other)
-        return Expression._call("divide_checked", [self, other])
-
-    def is_valid(self):
-        """Checks whether the expression is not-null (valid)"""
-        return Expression._call("is_valid", [self])
-
-    def is_null(self):
-        """Checks whether the expression is null"""
-        return Expression._call("is_null", [self])
-
-    def cast(self, type, bint safe=True):
-        """Explicitly change the expression's data type"""
-        cdef shared_ptr[CCastOptions] c_options
-        c_options.reset(new CCastOptions(safe))
-        c_options.get().to_type = pyarrow_unwrap_data_type(ensure_type(type))
-        return Expression._call("cast", [self],
-                                <shared_ptr[CFunctionOptions]> c_options)
-
-    def isin(self, values):
-        """Checks whether the expression is contained in values"""
-        cdef:
-            shared_ptr[CFunctionOptions] c_options
-            CDatum c_values
-
-        if not isinstance(values, pa.Array):
-            values = pa.array(values)
-
-        c_values = CDatum(pyarrow_unwrap_array(values))
-        c_options.reset(new CSetLookupOptions(c_values, True))
-        return Expression._call("is_in", [self], c_options)
-
-    @staticmethod
-    def _field(str name not None):
-        return Expression.wrap(CMakeFieldExpression(tobytes(name)))
-
-    @staticmethod
-    def _scalar(value):
-        cdef:
-            Scalar scalar
-
-        if isinstance(value, Scalar):
-            scalar = value
-        else:
-            scalar = pa.scalar(value)
-
-        return Expression.wrap(CMakeScalarExpression(scalar.unwrap()))
-
-
-_deserialize = Expression._deserialize
-cdef Expression _true = Expression._scalar(True)
-
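
For readers skimming the removed bindings, a minimal sketch of how these
Expression pieces composed in Python (assuming a pyarrow build that still
ships pyarrow.dataset; the field names "a", "b" and "c" are hypothetical):

    import pyarrow.dataset as ds

    # Comparisons against fields build Expressions; combine them with
    # & / | / ~ (the keywords and/or/not raise ValueError via __bool__).
    expr = (ds.field("a") < 3) | ~ds.field("b").isin([1, 2])

    # cast() and is_valid() are also Expressions, evaluated lazily at scan time.
    valid = ds.field("a").is_valid() & (ds.field("c").cast("int64") > 0)
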
-
-cdef class Dataset(_Weakrefable):
-    """
-    Collection of data fragments and potentially child datasets.
-
-    Arrow Datasets allow you to query against data that has been split across
-    multiple files. This sharding of data may indicate partitioning, which
-    can accelerate queries that only touch some partitions (files).
-    """
-
-    cdef:
-        shared_ptr[CDataset] wrapped
-        CDataset* dataset
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef void init(self, const shared_ptr[CDataset]& sp):
-        self.wrapped = sp
-        self.dataset = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CDataset]& sp):
-        type_name = frombytes(sp.get().type_name())
-
-        classes = {
-            'union': UnionDataset,
-            'filesystem': FileSystemDataset,
-        }
-
-        class_ = classes.get(type_name, None)
-        if class_ is None:
-            raise TypeError(type_name)
-
-        cdef Dataset self = class_.__new__(class_)
-        self.init(sp)
-        return self
-
-    cdef shared_ptr[CDataset] unwrap(self) nogil:
-        return self.wrapped
-
-    @property
-    def partition_expression(self):
-        """
-        An Expression which evaluates to true for all data viewed by this
-        Dataset.
-        """
-        return Expression.wrap(self.dataset.partition_expression())
-
-    def replace_schema(self, Schema schema not None):
-        """
-        Return a copy of this Dataset with a different schema.
-
-        The copy will view the same Fragments. If the new schema is not
-        compatible with the original dataset's schema, an error will be
-        raised.
-        """
-        cdef shared_ptr[CDataset] copy = GetResultValue(
-            self.dataset.ReplaceSchema(pyarrow_unwrap_schema(schema)))
-        return Dataset.wrap(move(copy))
-
-    def get_fragments(self, Expression filter=None):
-        """Returns an iterator over the fragments in this dataset.
-
-        Parameters
-        ----------
-        filter : Expression, default None
-            Return fragments matching the optional filter, either using the
-            partition_expression or internal information like Parquet's
-            statistics.
-
-        Returns
-        -------
-        fragments : iterator of Fragment
-        """
-        cdef:
-            CExpression c_filter
-            CFragmentIterator c_iterator
-
-        if filter is None:
-            c_fragments = move(GetResultValue(self.dataset.GetFragments()))
-        else:
-            c_filter = _bind(filter, self.schema)
-            c_fragments = move(GetResultValue(
-                self.dataset.GetFragments(c_filter)))
-
-        for maybe_fragment in c_fragments:
-            yield Fragment.wrap(GetResultValue(move(maybe_fragment)))
-
-    def _scanner(self, **kwargs):
-        return Scanner.from_dataset(self, **kwargs)
-
-    def scan(self, **kwargs):
-        """Builds a scan operation against the dataset.
-
-        It produces a stream of ScanTasks, each of which is meant to be a
-        unit of work to be dispatched. The tasks are not executed
-        automatically; the user is responsible for executing and dispatching
-        the individual tasks, so custom local task scheduling can be
-        implemented.
-
-        .. deprecated:: 4.0.0
-           Use `to_batches` instead.
-
-        Parameters
-        ----------
-        columns : list of str, default None
-            The columns to project. This can be a list of column names to
-            include (order and duplicates will be preserved), or a dictionary
-            with {new_column_name: expression} values for more advanced
-            projections.
-            The columns will be passed down to Datasets and corresponding data
-            fragments to avoid loading, copying, and deserializing columns
-            that will not be required further down the compute chain.
-            By default all of the available columns are projected. Raises
-            an exception if any of the referenced column names does not exist
-            in the dataset's Schema.
-        filter : Expression, default None
-            Scan will return only the rows matching the filter.
-            If possible the predicate will be pushed down to exploit the
-            partition information or internal metadata found in the data
-            source, e.g. Parquet statistics. Otherwise filters the loaded
-            RecordBatches before yielding them.
-        batch_size : int, default 1M
-            The maximum row count for scanned record batches. If scanned
-            record batches are overflowing memory, reduce this value to
-            decrease their size.
-        use_threads : bool, default True
-            If enabled, maximum parallelism will be used, as determined by
-            the number of available CPU cores.
-        memory_pool : MemoryPool, default None
-            For memory allocations, if required. If not specified, uses the
-            default pool.
-        fragment_scan_options : FragmentScanOptions, default None
-            Options specific to a particular scan and fragment type, which
-            can change between different scans of the same dataset.
-
-        Returns
-        -------
-        scan_tasks : iterator of ScanTask
-
-        Examples
-        --------
-        >>> import pyarrow.dataset as ds
-        >>> dataset = ds.dataset("path/to/dataset")
-
-        Selecting a subset of the columns:
-
-        >>> dataset.scan(columns=["A", "B"])
-
-        Projecting selected columns using an expression:
-
-        >>> dataset.scan(columns={"A_int": ds.field("A").cast("int64")})
-
-        Filtering rows while scanning:
-
-        >>> dataset.scan(filter=ds.field("A") > 0)
-        """
-        return self._scanner(**kwargs).scan()
-
-    def to_batches(self, **kwargs):
-        """Read the dataset as materialized record batches.
-
-        Builds a scan operation against the dataset and sequentially executes
-        the ScanTasks as the returned generator gets consumed.
-
-        See scan method parameters documentation.
-
-        Returns
-        -------
-        record_batches : iterator of RecordBatch
-        """
-        return self._scanner(**kwargs).to_batches()
-
-    def to_table(self, **kwargs):
-        """Read the dataset to an arrow table.
-
-        Note that this method reads all the selected data from the dataset
-        into memory.
-
-        See scan method parameters documentation.
-
-        Returns
-        -------
-        table : Table instance
-        """
-        return self._scanner(**kwargs).to_table()
-
-    def head(self, int num_rows, **kwargs):
-        """Load the first N rows of the dataset.
-
-        See scan method parameters documentation.
-
-        Returns
-        -------
-        table : Table instance
-        """
-        return self._scanner(**kwargs).head(num_rows)
-
-    @property
-    def schema(self):
-        """The common schema of the full Dataset"""
-        return pyarrow_wrap_schema(self.dataset.schema())
-
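
A short usage sketch for the Dataset surface above, assuming pyarrow.dataset
is available; "path/to/dataset" and the column names are hypothetical:

    import pyarrow.dataset as ds

    dataset = ds.dataset("path/to/dataset")  # hypothetical path

    # Prefer to_batches()/to_table() over the deprecated scan():
    table = dataset.to_table(columns=["A", "B"], filter=ds.field("A") > 0)
    for batch in dataset.to_batches(filter=ds.field("A") > 0):
        pass  # process one RecordBatch at a time
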
-
-cdef class InMemoryDataset(Dataset):
-    """A Dataset wrapping in-memory data.
-
-    Parameters
-    ----------
-    source
-        The data for this dataset. Can be a RecordBatch, Table, list of
-        RecordBatch/Table, iterable of RecordBatch, or a RecordBatchReader.
-        If an iterable is provided, the schema must also be provided.
-    schema : Schema, optional
-        Only required if passing an iterable as the source.
-    """
-
-    cdef:
-        CInMemoryDataset* in_memory_dataset
-
-    def __init__(self, source, Schema schema=None):
-        cdef:
-            RecordBatchReader reader
-            shared_ptr[CInMemoryDataset] in_memory_dataset
-
-        if isinstance(source, (pa.RecordBatch, pa.Table)):
-            source = [source]
-
-        if isinstance(source, (list, tuple)):
-            batches = []
-            for item in source:
-                if isinstance(item, pa.RecordBatch):
-                    batches.append(item)
-                elif isinstance(item, pa.Table):
-                    batches.extend(item.to_batches())
-                else:
-                    raise TypeError(
-                        'Expected a list of tables or batches. The given list '
-                        'contains a ' + type(item).__name__)
-                if schema is None:
-                    schema = item.schema
-                elif not schema.equals(item.schema):
-                    raise ArrowTypeError(
-                        f'Item has schema\n{item.schema}\nwhich does not '
-                        f'match expected schema\n{schema}')
-            if not batches and schema is None:
-                raise ValueError('Must provide schema to construct in-memory '
-                                 'dataset from an empty list')
-            table = pa.Table.from_batches(batches, schema=schema)
-            in_memory_dataset = make_shared[CInMemoryDataset](
-                pyarrow_unwrap_table(table))
-        elif isinstance(source, pa.ipc.RecordBatchReader):
-            reader = source
-            in_memory_dataset = make_shared[CInMemoryDataset](reader.reader)
-        elif _is_iterable(source):
-            if schema is None:
-                raise ValueError('Must provide schema to construct in-memory '
-                                 'dataset from an iterable')
-            reader = pa.ipc.RecordBatchReader.from_batches(schema, source)
-            in_memory_dataset = make_shared[CInMemoryDataset](reader.reader)
-        else:
-            raise TypeError(
-                'Expected a table, batch, iterable of tables/batches, or a '
-                'record batch reader instead of the given type: ' +
-                type(source).__name__
-            )
-
-        self.init(<shared_ptr[CDataset]> in_memory_dataset)
-
-    cdef void init(self, const shared_ptr[CDataset]& sp):
-        Dataset.init(self, sp)
-        self.in_memory_dataset = <CInMemoryDataset*> sp.get()
-
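
A sketch of the two construction paths InMemoryDataset supports, assuming the
class is re-exported from the pyarrow.dataset namespace:

    import pyarrow as pa
    import pyarrow.dataset as ds

    table = pa.table({"a": [1, 2, 3]})

    # From materialized data, the schema is taken from the data itself.
    dataset = ds.InMemoryDataset(table)

    # From a plain iterable of batches, the schema must be given explicitly.
    batches = iter(table.to_batches())
    dataset = ds.InMemoryDataset(batches, schema=table.schema)
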
-
-cdef class UnionDataset(Dataset):
-    """A Dataset wrapping child datasets.
-
-    Children's schemas must agree with the provided schema.
-
-    Parameters
-    ----------
-    schema : Schema
-        A known schema to conform to.
-    children : list of Dataset
-        One or more input children.
-    """
-
-    cdef:
-        CUnionDataset* union_dataset
-
-    def __init__(self, Schema schema not None, children):
-        cdef:
-            Dataset child
-            CDatasetVector c_children
-            shared_ptr[CUnionDataset] union_dataset
-
-        for child in children:
-            c_children.push_back(child.wrapped)
-
-        union_dataset = GetResultValue(CUnionDataset.Make(
-            pyarrow_unwrap_schema(schema), move(c_children)))
-        self.init(<shared_ptr[CDataset]> union_dataset)
-
-    cdef void init(self, const shared_ptr[CDataset]& sp):
-        Dataset.init(self, sp)
-        self.union_dataset = <CUnionDataset*> sp.get()
-
-    def __reduce__(self):
-        return UnionDataset, (self.schema, self.children)
-
-    @property
-    def children(self):
-        cdef CDatasetVector children = self.union_dataset.children()
-        return [Dataset.wrap(children[i]) for i in range(children.size())]
-
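
A minimal sketch of unioning two in-memory children under one schema
(assuming UnionDataset and InMemoryDataset are exported from
pyarrow.dataset):

    import pyarrow as pa
    import pyarrow.dataset as ds

    t1 = pa.table({"a": [1, 2]})
    t2 = pa.table({"a": [3, 4]})

    # Children must all conform to the schema passed first.
    union = ds.UnionDataset(t1.schema,
                            [ds.InMemoryDataset(t1), ds.InMemoryDataset(t2)])
    assert union.to_table().num_rows == 4
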
-
-cdef class FileSystemDataset(Dataset):
-    """A Dataset of file fragments.
-
-    A FileSystemDataset is composed of one or more FileFragment.
-
-    Parameters
-    ----------
-    fragments : list[Fragment]
-        List of fragments to consume.
-    schema : Schema
-        The top-level schema of the Dataset.
-    format : FileFormat
-        File format of the fragments, currently only ParquetFileFormat,
-        IpcFileFormat, and CsvFileFormat are supported.
-    filesystem : FileSystem
-        FileSystem of the fragments.
-    root_partition : Expression, optional
-        The top-level partition of the Dataset.
-    """
-
-    cdef:
-        CFileSystemDataset* filesystem_dataset
-
-    def __init__(self, fragments, Schema schema, FileFormat format,
-                 FileSystem filesystem=None, root_partition=None):
-        cdef:
-            FileFragment fragment=None
-            vector[shared_ptr[CFileFragment]] c_fragments
-            CResult[shared_ptr[CDataset]] result
-            shared_ptr[CFileSystem] c_filesystem
-
-        if root_partition is None:
-            root_partition = _true
-        elif not isinstance(root_partition, Expression):
-            raise TypeError(
-                "Argument 'root_partition' has incorrect type (expected "
-                "Epression, got {0})".format(type(root_partition))
-            )
-
-        for fragment in fragments:
-            c_fragments.push_back(
-                static_pointer_cast[CFileFragment, CFragment](
-                    fragment.unwrap()))
-
-            if filesystem is None:
-                filesystem = fragment.filesystem
-
-        if filesystem is not None:
-            c_filesystem = filesystem.unwrap()
-
-        result = CFileSystemDataset.Make(
-            pyarrow_unwrap_schema(schema),
-            (<Expression> root_partition).unwrap(),
-            format.unwrap(),
-            c_filesystem,
-            c_fragments
-        )
-        self.init(GetResultValue(result))
-
-    @property
-    def filesystem(self):
-        return FileSystem.wrap(self.filesystem_dataset.filesystem())
-
-    cdef void init(self, const shared_ptr[CDataset]& sp):
-        Dataset.init(self, sp)
-        self.filesystem_dataset = <CFileSystemDataset*> sp.get()
-
-    def __reduce__(self):
-        return FileSystemDataset, (
-            list(self.get_fragments()),
-            self.schema,
-            self.format,
-            self.filesystem,
-            self.partition_expression
-        )
-
-    @classmethod
-    def from_paths(cls, paths, schema=None, format=None,
-                   filesystem=None, partitions=None, root_partition=None):
-        """A Dataset created from a list of paths on a particular filesystem.
-
-        Parameters
-        ----------
-        paths : list of str
-            List of file paths to create the fragments from.
-        schema : Schema
-            The top-level schema of the Dataset.
-        format : FileFormat
-            File format to create fragments from, currently only
-            ParquetFileFormat, IpcFileFormat, and CsvFileFormat are supported.
-        filesystem : FileSystem
-            The filesystem which files are from.
-        partitions : List[Expression], optional
-            Attach additional partition information for the file paths.
-        root_partition : Expression, optional
-            The top-level partition of the Dataset.
-        """
-        cdef:
-            FileFragment fragment
-
-        if root_partition is None:
-            root_partition = _true
-
-        for arg, class_, name in [
-            (schema, Schema, 'schema'),
-            (format, FileFormat, 'format'),
-            (filesystem, FileSystem, 'filesystem'),
-            (root_partition, Expression, 'root_partition')
-        ]:
-            if not isinstance(arg, class_):
-                raise TypeError(
-                    "Argument '{0}' has incorrect type (expected {1}, "
-                    "got {2})".format(name, class_.__name__, type(arg))
-                )
-
-        partitions = partitions or [_true] * len(paths)
-
-        if len(paths) != len(partitions):
-            raise ValueError(
-                'The number of paths must be equal to the number of '
-                'partitions.'
-            )
-
-        fragments = [
-            format.make_fragment(path, filesystem, partitions[i])
-            for i, path in enumerate(paths)
-        ]
-        return FileSystemDataset(fragments, schema, format,
-                                 filesystem, root_partition)
-
-    @property
-    def files(self):
-        """List of the files"""
-        cdef vector[c_string] files = self.filesystem_dataset.files()
-        return [frombytes(f) for f in files]
-
-    @property
-    def format(self):
-        """The FileFormat of this source."""
-        return FileFormat.wrap(self.filesystem_dataset.format())
-
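
A sketch of from_paths, assuming local Parquet files; the paths, the "year"
field and the partition expressions are hypothetical:

    import pyarrow as pa
    import pyarrow.dataset as ds
    from pyarrow import fs

    dataset = ds.FileSystemDataset.from_paths(
        ["data/part-2020.parquet", "data/part-2021.parquet"],  # hypothetical
        schema=pa.schema([("x", pa.int64()), ("year", pa.int16())]),
        format=ds.ParquetFileFormat(),
        filesystem=fs.LocalFileSystem(),
        # One partition expression per path, usable for predicate pushdown.
        partitions=[ds.field("year") == 2020, ds.field("year") == 2021],
    )
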
-
-cdef CExpression _bind(Expression filter, Schema schema) except *:
-    assert schema is not None
-
-    if filter is None:
-        return _true.unwrap()
-
-    return GetResultValue(filter.unwrap().Bind(
-        deref(pyarrow_unwrap_schema(schema).get())))
-
-
-cdef class FileWriteOptions(_Weakrefable):
-
-    cdef:
-        shared_ptr[CFileWriteOptions] wrapped
-        CFileWriteOptions* options
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef void init(self, const shared_ptr[CFileWriteOptions]& sp):
-        self.wrapped = sp
-        self.options = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CFileWriteOptions]& sp):
-        type_name = frombytes(sp.get().type_name())
-
-        classes = {
-            'ipc': IpcFileWriteOptions,
-            'parquet': ParquetFileWriteOptions,
-        }
-
-        class_ = classes.get(type_name, None)
-        if class_ is None:
-            raise TypeError(type_name)
-
-        cdef FileWriteOptions self = class_.__new__(class_)
-        self.init(sp)
-        return self
-
-    @property
-    def format(self):
-        return FileFormat.wrap(self.options.format())
-
-    cdef inline shared_ptr[CFileWriteOptions] unwrap(self):
-        return self.wrapped
-
-
-cdef class FileFormat(_Weakrefable):
-
-    cdef:
-        shared_ptr[CFileFormat] wrapped
-        CFileFormat* format
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef void init(self, const shared_ptr[CFileFormat]& sp):
-        self.wrapped = sp
-        self.format = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CFileFormat]& sp):
-        type_name = frombytes(sp.get().type_name())
-
-        classes = {
-            'ipc': IpcFileFormat,
-            'csv': CsvFileFormat,
-            'parquet': ParquetFileFormat,
-        }
-
-        class_ = classes.get(type_name, None)
-        if class_ is None:
-            raise TypeError(type_name)
-
-        cdef FileFormat self = class_.__new__(class_)
-        self.init(sp)
-        return self
-
-    cdef inline shared_ptr[CFileFormat] unwrap(self):
-        return self.wrapped
-
-    def inspect(self, file, filesystem=None):
-        """Infer the schema of a file."""
-        c_source = _make_file_source(file, filesystem)
-        c_schema = GetResultValue(self.format.Inspect(c_source))
-        return pyarrow_wrap_schema(move(c_schema))
-
-    def make_fragment(self, file, filesystem=None,
-                      Expression partition_expression=None):
-        """
-        Make a FileFragment of this FileFormat from the given file. The
-        partition expression, if given, may not reference fields absent
-        from the fragment's schema, which will be inferred from the file.
-        """
-        if partition_expression is None:
-            partition_expression = _true
-
-        c_source = _make_file_source(file, filesystem)
-        c_fragment = <shared_ptr[CFragment]> GetResultValue(
-            self.format.MakeFragment(move(c_source),
-                                     partition_expression.unwrap(),
-                                     <shared_ptr[CSchema]>nullptr))
-        return Fragment.wrap(move(c_fragment))
-
-    def make_write_options(self):
-        return FileWriteOptions.wrap(self.format.DefaultWriteOptions())
-
-    @property
-    def default_extname(self):
-        return frombytes(self.format.type_name())
-
-    @property
-    def default_fragment_scan_options(self):
-        return FragmentScanOptions.wrap(
-            self.wrapped.get().default_fragment_scan_options)
-
-    @default_fragment_scan_options.setter
-    def default_fragment_scan_options(self, FragmentScanOptions options):
-        if options is None:
-            self.wrapped.get().default_fragment_scan_options =\
-                <shared_ptr[CFragmentScanOptions]>nullptr
-        else:
-            self._set_default_fragment_scan_options(options)
-
-    cdef _set_default_fragment_scan_options(self, FragmentScanOptions options):
-        raise ValueError(f"Cannot set fragment scan options for "
-                         f"'{options.type_name}' on {self.__class__.__name__}")
-
-    def __eq__(self, other):
-        try:
-            return self.equals(other)
-        except TypeError:
-            return False
-
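
Note that _make_file_source refuses a bare path without a FileSystem, so
inspect() and make_fragment() need one when given a path string. A sketch
("data/part-0.parquet" is hypothetical):

    import pyarrow.dataset as ds
    from pyarrow import fs

    fmt = ds.ParquetFileFormat()
    local = fs.LocalFileSystem()

    # Infer the schema without materializing the data.
    schema = fmt.inspect("data/part-0.parquet", filesystem=local)
    fragment = fmt.make_fragment("data/part-0.parquet", filesystem=local)
    table = fragment.to_table()
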
-
-cdef class Fragment(_Weakrefable):
-    """Fragment of data from a Dataset."""
-
-    cdef:
-        shared_ptr[CFragment] wrapped
-        CFragment* fragment
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef void init(self, const shared_ptr[CFragment]& sp):
-        self.wrapped = sp
-        self.fragment = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CFragment]& sp):
-        type_name = frombytes(sp.get().type_name())
-
-        classes = {
-            # IpcFileFormat and CsvFileFormat do not have corresponding
-            # subclasses of FileFragment
-            'ipc': FileFragment,
-            'csv': FileFragment,
-            'parquet': ParquetFileFragment,
-        }
-
-        class_ = classes.get(type_name, None)
-        if class_ is None:
-            class_ = Fragment
-
-        cdef Fragment self = class_.__new__(class_)
-        self.init(sp)
-        return self
-
-    cdef inline shared_ptr[CFragment] unwrap(self):
-        return self.wrapped
-
-    @property
-    def physical_schema(self):
-        """Return the physical schema of this Fragment. This schema can be
-        different from the dataset read schema."""
-        cdef:
-            shared_ptr[CSchema] c_schema
-
-        c_schema = GetResultValue(self.fragment.ReadPhysicalSchema())
-        return pyarrow_wrap_schema(c_schema)
-
-    @property
-    def partition_expression(self):
-        """An Expression which evaluates to true for all data viewed by this
-        Fragment.
-        """
-        return Expression.wrap(self.fragment.partition_expression())
-
-    def _scanner(self, **kwargs):
-        return Scanner.from_fragment(self, **kwargs)
-
-    def scan(self, Schema schema=None, **kwargs):
-        """Builds a scan operation against the dataset.
-
-        It produces a stream of ScanTasks, each of which is meant to be a
-        unit of work to be dispatched. The tasks are not executed
-        automatically; the user is responsible for executing and dispatching
-        the individual tasks, so custom local task scheduling can be
-        implemented.
-
-        .. deprecated:: 4.0.0
-           Use `to_batches` instead.
-
-        Parameters
-        ----------
-        schema : Schema
-            Schema to use for scanning. This is used to unify a Fragment to
-            its Dataset's schema. If not specified, this will use the
-            Fragment's physical schema, which might differ for each Fragment.
-        columns : list of str, default None
-            The columns to project. This can be a list of column names to
-            include (order and duplicates will be preserved), or a dictionary
-            with {new_column_name: expression} values for more advanced
-            projections.
-            The columns will be passed down to Datasets and corresponding data
-            fragments to avoid loading, copying, and deserializing columns
-            that will not be required further down the compute chain.
-            By default all of the available columns are projected. Raises
-            an exception if any of the referenced column names does not exist
-            in the dataset's Schema.
-        filter : Expression, default None
-            Scan will return only the rows matching the filter.
-            If possible the predicate will be pushed down to exploit the
-            partition information or internal metadata found in the data
-            source, e.g. Parquet statistics. Otherwise filters the loaded
-            RecordBatches before yielding them.
-        batch_size : int, default 1M
-            The maximum row count for scanned record batches. If scanned
-            record batches are overflowing memory, reduce this value to
-            decrease their size.
-        use_threads : bool, default True
-            If enabled, maximum parallelism will be used, as determined by
-            the number of available CPU cores.
-        memory_pool : MemoryPool, default None
-            For memory allocations, if required. If not specified, uses the
-            default pool.
-        fragment_scan_options : FragmentScanOptions, default None
-            Options specific to a particular scan and fragment type, which
-            can change between different scans of the same dataset.
-
-        Returns
-        -------
-        scan_tasks : iterator of ScanTask
-        """
-        return self._scanner(schema=schema, **kwargs).scan()
-
-    def to_batches(self, Schema schema=None, **kwargs):
-        """Read the fragment as materialized record batches.
-
-        See scan method parameters documentation.
-
-        Returns
-        -------
-        record_batches : iterator of RecordBatch
-        """
-        return self._scanner(schema=schema, **kwargs).to_batches()
-
-    def to_table(self, Schema schema=None, **kwargs):
-        """Convert this Fragment into a Table.
-
-        Use this convenience utility with care. This will serially materialize
-        the Scan result in memory before creating the Table.
-
-        See scan method parameters documentation.
-
-        Returns
-        -------
-        table : Table
-        """
-        return self._scanner(schema=schema, **kwargs).to_table()
-
-    def head(self, int num_rows, **kwargs):
-        """Load the first N rows of the fragment.
-
-        See scan method parameters documentation.
-
-        Returns
-        -------
-        table : Table instance
-        """
-        return self._scanner(**kwargs).head(num_rows)
-
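
A sketch tying Fragment back to Dataset.get_fragments(); the dataset path and
the "year" field are hypothetical:

    import pyarrow.dataset as ds

    dataset = ds.dataset("path/to/dataset")  # hypothetical path
    for fragment in dataset.get_fragments(filter=ds.field("year") == 2020):
        # Passing the dataset schema unifies fragments whose physical
        # schemas differ from the common read schema.
        table = fragment.to_table(schema=dataset.schema)
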
-
-cdef class FileFragment(Fragment):
-    """A Fragment representing a data file."""
-
-    cdef:
-        CFileFragment* file_fragment
-
-    cdef void init(self, const shared_ptr[CFragment]& sp):
-        Fragment.init(self, sp)
-        self.file_fragment = <CFileFragment*> sp.get()
-
-    def __reduce__(self):
-        buffer = self.buffer
-        return self.format.make_fragment, (
-            self.path if buffer is None else buffer,
-            self.filesystem,
-            self.partition_expression
-        )
-
-    @property
-    def path(self):
-        """
-        The path of the data file viewed by this fragment, if it views a
-        file. If instead it views a buffer, this will be "<Buffer>".
-        """
-        return frombytes(self.file_fragment.source().path())
-
-    @property
-    def filesystem(self):
-        """
-        The FileSystem containing the data file viewed by this fragment, if
-        it views a file. If instead it views a buffer, this will be None.
-        """
-        cdef:
-            shared_ptr[CFileSystem] c_fs
-        c_fs = self.file_fragment.source().filesystem()
-
-        if c_fs.get() == nullptr:
-            return None
-
-        return FileSystem.wrap(c_fs)
-
-    @property
-    def buffer(self):
-        """
-        The buffer viewed by this fragment, if it views a buffer. If
-        instead it views a file, this will be None.
-        """
-        cdef:
-            shared_ptr[CBuffer] c_buffer
-        c_buffer = self.file_fragment.source().buffer()
-
-        if c_buffer.get() == nullptr:
-            return None
-
-        return pyarrow_wrap_buffer(c_buffer)
-
-    @property
-    def format(self):
-        """
-        The format of the data file viewed by this fragment.
-        """
-        return FileFormat.wrap(self.file_fragment.format())
-
-
-class RowGroupInfo:
-    """A wrapper class for RowGroup information"""
-
-    def __init__(self, id, metadata, schema):
-        self.id = id
-        self.metadata = metadata
-        self.schema = schema
-
-    @property
-    def num_rows(self):
-        return self.metadata.num_rows
-
-    @property
-    def total_byte_size(self):
-        return self.metadata.total_byte_size
-
-    @property
-    def statistics(self):
-        def name_stats(i):
-            col = self.metadata.column(i)
-
-            stats = col.statistics
-            if stats is None or not stats.has_min_max:
-                return None, None
-
-            name = col.path_in_schema
-            field_index = self.schema.get_field_index(name)
-            if field_index < 0:
-                return None, None
-
-            typ = self.schema.field(field_index).type
-            return col.path_in_schema, {
-                'min': pa.scalar(stats.min, type=typ).as_py(),
-                'max': pa.scalar(stats.max, type=typ).as_py()
-            }
-
-        return {
-            name: stats for name, stats
-            in map(name_stats, range(self.metadata.num_columns))
-            if stats is not None
-        }
-
-    def __repr__(self):
-        return "RowGroupInfo({})".format(self.id)
-
-    def __eq__(self, other):
-        if isinstance(other, int):
-            return self.id == other
-        if not isinstance(other, RowGroupInfo):
-            return False
-        return self.id == other.id
-
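
A sketch of inspecting row-group statistics through this wrapper, writing a
small Parquet file first so the example is self-contained ("t.parquet" is a
scratch file):

    import pyarrow as pa
    import pyarrow.dataset as ds
    import pyarrow.parquet as pq

    pq.write_table(pa.table({"x": list(range(10))}), "t.parquet",
                   row_group_size=5)  # two row groups of 5 rows each

    fragment = next(iter(ds.dataset("t.parquet").get_fragments()))
    for rg in fragment.row_groups:
        # statistics maps column name -> {'min': ..., 'max': ...}
        print(rg.id, rg.num_rows, rg.statistics["x"])
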
-
-cdef class FragmentScanOptions(_Weakrefable):
-    """Scan options specific to a particular fragment and scan operation."""
-
-    cdef:
-        shared_ptr[CFragmentScanOptions] wrapped
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef void init(self, const shared_ptr[CFragmentScanOptions]& sp):
-        self.wrapped = sp
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CFragmentScanOptions]& sp):
-        if not sp:
-            return None
-
-        type_name = frombytes(sp.get().type_name())
-
-        classes = {
-            'csv': CsvFragmentScanOptions,
-            'parquet': ParquetFragmentScanOptions,
-        }
-
-        class_ = classes.get(type_name, None)
-        if class_ is None:
-            raise TypeError(type_name)
-
-        cdef FragmentScanOptions self = class_.__new__(class_)
-        self.init(sp)
-        return self
-
-    @property
-    def type_name(self):
-        return frombytes(self.wrapped.get().type_name())
-
-    def __eq__(self, other):
-        try:
-            return self.equals(other)
-        except TypeError:
-            return False
-
-
-cdef class ParquetFileFragment(FileFragment):
-    """A Fragment representing a parquet file."""
-
-    cdef:
-        CParquetFileFragment* parquet_file_fragment
-
-    cdef void init(self, const shared_ptr[CFragment]& sp):
-        FileFragment.init(self, sp)
-        self.parquet_file_fragment = <CParquetFileFragment*> sp.get()
-
-    def __reduce__(self):
-        buffer = self.buffer
-        row_groups = [row_group.id for row_group in self.row_groups]
-        return self.format.make_fragment, (
-            self.path if buffer is None else buffer,
-            self.filesystem,
-            self.partition_expression,
-            row_groups
-        )
-
-    def ensure_complete_metadata(self):
-        """
-        Ensure that all metadata (statistics, physical schema, ...) have
-        been read and cached in this fragment.
-        """
-        check_status(self.parquet_file_fragment.EnsureCompleteMetadata())
-
-    @property
-    def row_groups(self):
-        metadata = self.metadata
-        cdef vector[int] row_groups = self.parquet_file_fragment.row_groups()
-        return [RowGroupInfo(i, metadata.row_group(i), self.physical_schema)
-                for i in row_groups]
-
-    @property
-    def metadata(self):
-        self.ensure_complete_metadata()
-        cdef FileMetaData metadata = FileMetaData()
-        metadata.init(self.parquet_file_fragment.metadata())
-        return metadata
-
-    @property
-    def num_row_groups(self):
-        """
-        Return the number of row groups viewed by this fragment (not the
-        number of row groups in the originating file).
-        """
-        self.ensure_complete_metadata()
-        return self.parquet_file_fragment.row_groups().size()
-
-    def split_by_row_group(self, Expression filter=None,
-                           Schema schema=None):
-        """
-        Split the fragment into multiple fragments.
-
-        Yield a Fragment wrapping each row group in this ParquetFileFragment.
-        Row groups whose metadata contradicts the optional filter will be
-        excluded.
-
-        Parameters
-        ----------
-        filter : Expression, default None
-            Only include the row groups which satisfy this predicate (using
-            the Parquet RowGroup statistics).
-        schema : Schema, default None
-            Schema to use when filtering row groups. Defaults to the
-            Fragment's physical schema.
-
-        Returns
-        -------
-        A list of Fragments
-        """
-        cdef:
-            vector[shared_ptr[CFragment]] c_fragments
-            CExpression c_filter
-            shared_ptr[CFragment] c_fragment
-
-        schema = schema or self.physical_schema
-        c_filter = _bind(filter, schema)
-        with nogil:
-            c_fragments = move(GetResultValue(
-                self.parquet_file_fragment.SplitByRowGroup(move(c_filter))))
-
-        return [Fragment.wrap(c_fragment) for c_fragment in c_fragments]
-
-    def subset(self, Expression filter=None, Schema schema=None,
-               object row_group_ids=None):
-        """
-        Create a subset of the fragment (viewing a subset of the row groups).
-
-        Subset can be specified by either a filter predicate (with optional
-        schema) or by a list of row group IDs. Note that when using a filter,
-        the resulting fragment can be empty (viewing no row groups).
-
-        Parameters
-        ----------
-        filter : Expression, default None
-            Only include the row groups which satisfy this predicate (using
-            the Parquet RowGroup statistics).
-        schema : Schema, default None
-            Schema to use when filtering row groups. Defaults to the
-            Fragment's physical schema.
-        row_group_ids : list of ints
-            The row group IDs to include in the subset. Can only be specified
-            if `filter` is None.
-
-        Returns
-        -------
-        ParquetFileFragment
-        """
-        cdef:
-            CExpression c_filter
-            vector[int] c_row_group_ids
-            shared_ptr[CFragment] c_fragment
-
-        if filter is not None and row_group_ids is not None:
-            raise ValueError(
-                "Cannot specify both 'filter' and 'row_group_ids'."
-            )
-
-        if filter is not None:
-            schema = schema or self.physical_schema
-            c_filter = _bind(filter, schema)
-            with nogil:
-                c_fragment = move(GetResultValue(
-                    self.parquet_file_fragment.SubsetWithFilter(
-                        move(c_filter))))
-        elif row_group_ids is not None:
-            c_row_group_ids = [
-                <int> row_group for row_group in sorted(set(row_group_ids))
-            ]
-            with nogil:
-                c_fragment = move(GetResultValue(
-                    self.parquet_file_fragment.SubsetWithIds(
-                        move(c_row_group_ids))))
-        else:
-            raise ValueError(
-                "Need to specify one of 'filter' or 'row_group_ids'"
-            )
-
-        return Fragment.wrap(c_fragment)
-
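
A sketch of split_by_row_group() and subset(), reusing a two-row-group
scratch file like the one above:

    import pyarrow as pa
    import pyarrow.dataset as ds
    import pyarrow.parquet as pq

    pq.write_table(pa.table({"x": list(range(10))}), "t.parquet",
                   row_group_size=5)
    fragment = next(iter(ds.dataset("t.parquet").get_fragments()))

    # One fragment per surviving row group; the first group (x in 0..4)
    # contradicts the filter and is dropped via its statistics.
    pieces = fragment.split_by_row_group(ds.field("x") > 4)
    assert len(pieces) == 1

    # Or view an explicit subset of row groups by ID.
    subset = fragment.subset(row_group_ids=[0])
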
-
-cdef class ParquetReadOptions(_Weakrefable):
-    """
-    Parquet format specific options for reading.
-
-    Parameters
-    ----------
-    dictionary_columns : list of string, default None
-        Names of columns which should be dictionary encoded as
-        they are read.
-    """
-
-    cdef public:
-        set dictionary_columns
-
-    # Also see _PARQUET_READ_OPTIONS
-    def __init__(self, dictionary_columns=None):
-        self.dictionary_columns = set(dictionary_columns or set())
-
-    def equals(self, ParquetReadOptions other):
-        return self.dictionary_columns == other.dictionary_columns
-
-    def __eq__(self, other):
-        try:
-            return self.equals(other)
-        except TypeError:
-            return False
-
-    def __repr__(self):
-        return (f"<ParquetReadOptions"
-                f" dictionary_columns={self.dictionary_columns}>")
-
-
-cdef class ParquetFileWriteOptions(FileWriteOptions):
-
-    cdef:
-        CParquetFileWriteOptions* parquet_options
-        object _properties
-
-    def update(self, **kwargs):
-        arrow_fields = {
-            "use_deprecated_int96_timestamps",
-            "coerce_timestamps",
-            "allow_truncated_timestamps",
-        }
-
-        setters = set()
-        for name, value in kwargs.items():
-            if name not in self._properties:
-                raise TypeError("unexpected parquet write option: " + name)
-            self._properties[name] = value
-            if name in arrow_fields:
-                setters.add(self._set_arrow_properties)
-            else:
-                setters.add(self._set_properties)
-
-        for setter in setters:
-            setter()
-
-    def _set_properties(self):
-        cdef CParquetFileWriteOptions* opts = self.parquet_options
-
-        opts.writer_properties = _create_writer_properties(
-            use_dictionary=self._properties["use_dictionary"],
-            compression=self._properties["compression"],
-            version=self._properties["version"],
-            write_statistics=self._properties["write_statistics"],
-            data_page_size=self._properties["data_page_size"],
-            compression_level=self._properties["compression_level"],
-            use_byte_stream_split=(
-                self._properties["use_byte_stream_split"]
-            ),
-            data_page_version=self._properties["data_page_version"],
-        )
-
-    def _set_arrow_properties(self):
-        cdef CParquetFileWriteOptions* opts = self.parquet_options
-
-        opts.arrow_writer_properties = _create_arrow_writer_properties(
-            use_deprecated_int96_timestamps=(
-                self._properties["use_deprecated_int96_timestamps"]
-            ),
-            coerce_timestamps=self._properties["coerce_timestamps"],
-            allow_truncated_timestamps=(
-                self._properties["allow_truncated_timestamps"]
-            ),
-            writer_engine_version="V2",
-            use_compliant_nested_type=(
-                self._properties["use_compliant_nested_type"]
-            )
-        )
-
-    cdef void init(self, const shared_ptr[CFileWriteOptions]& sp):
-        FileWriteOptions.init(self, sp)
-        self.parquet_options = <CParquetFileWriteOptions*> sp.get()
-        self._properties = dict(
-            use_dictionary=True,
-            compression="snappy",
-            version="1.0",
-            write_statistics=None,
-            data_page_size=None,
-            compression_level=None,
-            use_byte_stream_split=False,
-            data_page_version="1.0",
-            use_deprecated_int96_timestamps=False,
-            coerce_timestamps=None,
-            allow_truncated_timestamps=False,
-            use_compliant_nested_type=False,
-        )
-        self._set_properties()
-        self._set_arrow_properties()
-
-
-cdef set _PARQUET_READ_OPTIONS = {'dictionary_columns'}
-
-
-cdef class ParquetFileFormat(FileFormat):
-
-    cdef:
-        CParquetFileFormat* parquet_format
-
-    def __init__(self, read_options=None,
-                 default_fragment_scan_options=None, **kwargs):
-        cdef:
-            shared_ptr[CParquetFileFormat] wrapped
-            CParquetFileFormatReaderOptions* options
-
-        # Read/scan options
-        read_options_args = {option: kwargs[option] for option in kwargs
-                             if option in _PARQUET_READ_OPTIONS}
-        scan_args = {option: kwargs[option] for option in kwargs
-                     if option not in _PARQUET_READ_OPTIONS}
-        if read_options and read_options_args:
-            duplicates = ', '.join(sorted(read_options_args))
-            raise ValueError(f'If `read_options` is given, '
-                             f'cannot specify {duplicates}')
-        if default_fragment_scan_options and scan_args:
-            duplicates = ', '.join(sorted(scan_args))
-            raise ValueError(f'If `default_fragment_scan_options` is given, '
-                             f'cannot specify {duplicates}')
-
-        if read_options is None:
-            read_options = ParquetReadOptions(**read_options_args)
-        elif isinstance(read_options, dict):
-            # For backwards compatibility
-            duplicates = []
-            for option, value in read_options.items():
-                if option in _PARQUET_READ_OPTIONS:
-                    read_options_args[option] = value
-                else:
-                    duplicates.append(option)
-                    scan_args[option] = value
-            if duplicates:
-                duplicates = ", ".join(duplicates)
-                warnings.warn(f'The scan options {duplicates} should be '
-                              'specified directly as keyword arguments')
-            read_options = ParquetReadOptions(**read_options_args)
-        elif not isinstance(read_options, ParquetReadOptions):
-            raise TypeError('`read_options` must be either a dictionary or an '
-                            'instance of ParquetReadOptions')
-
-        if default_fragment_scan_options is None:
-            default_fragment_scan_options = ParquetFragmentScanOptions(
-                **scan_args)
-        elif isinstance(default_fragment_scan_options, dict):
-            default_fragment_scan_options = ParquetFragmentScanOptions(
-                **default_fragment_scan_options)
-        elif not isinstance(default_fragment_scan_options,
-                            ParquetFragmentScanOptions):
-            raise TypeError('`default_fragment_scan_options` must be either a '
-                            'dictionary or an instance of '
-                            'ParquetFragmentScanOptions')
-
-        wrapped = make_shared[CParquetFileFormat]()
-        options = &(wrapped.get().reader_options)
-        if read_options.dictionary_columns is not None:
-            for column in read_options.dictionary_columns:
-                options.dict_columns.insert(tobytes(column))
-
-        self.init(<shared_ptr[CFileFormat]> wrapped)
-        self.default_fragment_scan_options = default_fragment_scan_options
-
-    cdef void init(self, const shared_ptr[CFileFormat]& sp):
-        FileFormat.init(self, sp)
-        self.parquet_format = <CParquetFileFormat*> sp.get()
-
-    @property
-    def read_options(self):
-        cdef CParquetFileFormatReaderOptions* options
-        options = &self.parquet_format.reader_options
-        return ParquetReadOptions(
-            dictionary_columns={frombytes(col)
-                                for col in options.dict_columns},
-        )
-
-    def make_write_options(self, **kwargs):
-        opts = FileFormat.make_write_options(self)
-        (<ParquetFileWriteOptions> opts).update(**kwargs)
-        return opts
-
-    cdef _set_default_fragment_scan_options(self, FragmentScanOptions options):
-        if options.type_name == 'parquet':
-            self.parquet_format.default_fragment_scan_options = options.wrapped
-        else:
-            super()._set_default_fragment_scan_options(options)
-
-    def equals(self, ParquetFileFormat other):
-        return (
-            self.read_options.equals(other.read_options) and
-            self.default_fragment_scan_options ==
-            other.default_fragment_scan_options
-        )
-
-    def __reduce__(self):
-        return ParquetFileFormat, (self.read_options,
-                                   self.default_fragment_scan_options)
-
-    def __repr__(self):
-        return f"<ParquetFileFormat read_options={self.read_options}>"
-
-    def make_fragment(self, file, filesystem=None,
-                      Expression partition_expression=None, row_groups=None):
-        cdef:
-            vector[int] c_row_groups
-
-        if partition_expression is None:
-            partition_expression = _true
-
-        if row_groups is None:
-            return super().make_fragment(file, filesystem,
-                                         partition_expression)
-
-        c_source = _make_file_source(file, filesystem)
-        c_row_groups = [<int> row_group for row_group in set(row_groups)]
-
-        c_fragment = <shared_ptr[CFragment]> GetResultValue(
-            self.parquet_format.MakeFragment(move(c_source),
-                                             partition_expression.unwrap(),
-                                             <shared_ptr[CSchema]>nullptr,
-                                             move(c_row_groups)))
-        return Fragment.wrap(move(c_fragment))
-
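
A sketch of the read/write option plumbing above; the "city" column and
"data/" directory are hypothetical:

    import pyarrow.dataset as ds

    # Keyword shorthand and an explicit ParquetReadOptions are equivalent.
    fmt = ds.ParquetFileFormat(dictionary_columns={"city"})
    assert fmt == ds.ParquetFileFormat(
        read_options=ds.ParquetReadOptions(dictionary_columns={"city"}))

    # make_write_options() forwards keyword arguments through update().
    write_options = fmt.make_write_options(compression="zstd")

    dataset = ds.dataset("data/", format=fmt)  # hypothetical directory
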
-
-cdef class ParquetFragmentScanOptions(FragmentScanOptions):
-    """Scan-specific options for Parquet fragments.
-
-    Parameters
-    ----------
-    use_buffered_stream : bool, default False
-        Read files through buffered input streams rather than loading entire
-        row groups at once. This may be enabled to reduce memory overhead.
-        Disabled by default.
-    buffer_size : int, default 8192
-        Size of buffered stream, if enabled. Default is 8KB.
-    pre_buffer : bool, default False
-        If enabled, pre-buffer the raw Parquet data instead of issuing one
-        read per column chunk. This can improve performance on high-latency
-        filesystems.
-    enable_parallel_column_conversion : bool, default False
-        EXPERIMENTAL: Parallelize conversion across columns. This option is
-        ignored if a scan is already parallelized across input files to avoid
-        thread contention. This option will be removed after support is added
-        for simultaneous parallelization across files and columns.
-    """
-
-    cdef:
-        CParquetFragmentScanOptions* parquet_options
-
-    # Avoid mistakenly creating attributes
-    __slots__ = ()
-
-    def __init__(self, bint use_buffered_stream=False,
-                 buffer_size=8192,
-                 bint pre_buffer=False,
-                 bint enable_parallel_column_conversion=False):
-        self.init(shared_ptr[CFragmentScanOptions](
-            new CParquetFragmentScanOptions()))
-        self.use_buffered_stream = use_buffered_stream
-        self.buffer_size = buffer_size
-        self.pre_buffer = pre_buffer
-        self.enable_parallel_column_conversion = \
-            enable_parallel_column_conversion
-
-    cdef void init(self, const shared_ptr[CFragmentScanOptions]& sp):
-        FragmentScanOptions.init(self, sp)
-        self.parquet_options = <CParquetFragmentScanOptions*> sp.get()
-
-    cdef CReaderProperties* reader_properties(self):
-        return self.parquet_options.reader_properties.get()
-
-    cdef ArrowReaderProperties* arrow_reader_properties(self):
-        return self.parquet_options.arrow_reader_properties.get()
-
-    @property
-    def use_buffered_stream(self):
-        return self.reader_properties().is_buffered_stream_enabled()
-
-    @use_buffered_stream.setter
-    def use_buffered_stream(self, bint use_buffered_stream):
-        if use_buffered_stream:
-            self.reader_properties().enable_buffered_stream()
-        else:
-            self.reader_properties().disable_buffered_stream()
-
-    @property
-    def buffer_size(self):
-        return self.reader_properties().buffer_size()
-
-    @buffer_size.setter
-    def buffer_size(self, buffer_size):
-        if buffer_size <= 0:
-            raise ValueError("Buffer size must be larger than zero")
-        self.reader_properties().set_buffer_size(buffer_size)
-
-    @property
-    def pre_buffer(self):
-        return self.arrow_reader_properties().pre_buffer()
-
-    @pre_buffer.setter
-    def pre_buffer(self, bint pre_buffer):
-        self.arrow_reader_properties().set_pre_buffer(pre_buffer)
-
-    @property
-    def enable_parallel_column_conversion(self):
-        return self.parquet_options.enable_parallel_column_conversion
-
-    @enable_parallel_column_conversion.setter
-    def enable_parallel_column_conversion(
-            self, bint enable_parallel_column_conversion):
-        self.parquet_options.enable_parallel_column_conversion = \
-            enable_parallel_column_conversion
-
-    def equals(self, ParquetFragmentScanOptions other):
-        return (
-            self.use_buffered_stream == other.use_buffered_stream and
-            self.buffer_size == other.buffer_size and
-            self.pre_buffer == other.pre_buffer and
-            self.enable_parallel_column_conversion ==
-            other.enable_parallel_column_conversion)
-
-    def __reduce__(self):
-        return ParquetFragmentScanOptions, (
-            self.use_buffered_stream, self.buffer_size, self.pre_buffer,
-            self.enable_parallel_column_conversion)
-
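
A sketch of wiring these scan options in, either as a format-wide default or
per scan, assuming ParquetFragmentScanOptions is exported from
pyarrow.dataset; whether the options pay off depends on filesystem latency:

    import pyarrow.dataset as ds

    scan_opts = ds.ParquetFragmentScanOptions(
        use_buffered_stream=True,
        buffer_size=64 * 1024,   # 64 KB instead of the 8 KB default
        pre_buffer=True,         # helps on high-latency filesystems
    )

    # As the format-wide default...
    fmt = ds.ParquetFileFormat(default_fragment_scan_options=scan_opts)

    # ...or overridden for a single scan ("data/" is hypothetical).
    table = ds.dataset("data/", format=fmt).to_table(
        fragment_scan_options=scan_opts)
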
-
-cdef class IpcFileWriteOptions(FileWriteOptions):
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-
-cdef class IpcFileFormat(FileFormat):
-
-    def __init__(self):
-        self.init(shared_ptr[CFileFormat](new CIpcFileFormat()))
-
-    def equals(self, IpcFileFormat other):
-        return True
-
-    @property
-    def default_extname(self):
-        return "feather"
-
-    def __reduce__(self):
-        return IpcFileFormat, tuple()
-
-
-cdef class CsvFileFormat(FileFormat):
-    cdef:
-        CCsvFileFormat* csv_format
-
-    # Avoid mistakenly creating attributes
-    __slots__ = ()
-
-    def __init__(self, ParseOptions parse_options=None,
-                 default_fragment_scan_options=None,
-                 ConvertOptions convert_options=None,
-                 ReadOptions read_options=None):
-        self.init(shared_ptr[CFileFormat](new CCsvFileFormat()))
-        if parse_options is not None:
-            self.parse_options = parse_options
-        if convert_options is not None or read_options is not None:
-            if default_fragment_scan_options:
-                raise ValueError('If `default_fragment_scan_options` is '
-                                 'given, cannot specify convert_options '
-                                 'or read_options')
-            self.default_fragment_scan_options = CsvFragmentScanOptions(
-                convert_options=convert_options, read_options=read_options)
-        elif isinstance(default_fragment_scan_options, dict):
-            self.default_fragment_scan_options = CsvFragmentScanOptions(
-                **default_fragment_scan_options)
-        elif isinstance(default_fragment_scan_options, CsvFragmentScanOptions):
-            self.default_fragment_scan_options = default_fragment_scan_options
-        elif default_fragment_scan_options is not None:
-            raise TypeError('`default_fragment_scan_options` must be either '
-                            'a dictionary or an instance of '
-                            'CsvFragmentScanOptions')
-
-    cdef void init(self, const shared_ptr[CFileFormat]& sp):
-        FileFormat.init(self, sp)
-        self.csv_format = <CCsvFileFormat*> sp.get()
-
-    def make_write_options(self):
-        raise NotImplementedError("writing CSV datasets")
-
-    @property
-    def parse_options(self):
-        return ParseOptions.wrap(self.csv_format.parse_options)
-
-    @parse_options.setter
-    def parse_options(self, ParseOptions parse_options not None):
-        self.csv_format.parse_options = parse_options.options
-
-    cdef _set_default_fragment_scan_options(self, FragmentScanOptions options):
-        if options.type_name == 'csv':
-            self.csv_format.default_fragment_scan_options = options.wrapped
-        else:
-            super()._set_default_fragment_scan_options(options)
-
-    def equals(self, CsvFileFormat other):
-        return (
-            self.parse_options.equals(other.parse_options) and
-            self.default_fragment_scan_options ==
-            other.default_fragment_scan_options)
-
-    def __reduce__(self):
-        return CsvFileFormat, (self.parse_options,
-                               self.default_fragment_scan_options)
-
-    def __repr__(self):
-        return f"<CsvFileFormat parse_options={self.parse_options}>"
-
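
A sketch of a customized CSV format; the delimiter and "data/" directory are
hypothetical:

    import pyarrow.dataset as ds
    from pyarrow import csv

    fmt = ds.CsvFileFormat(
        parse_options=csv.ParseOptions(delimiter="|"),
        # convert/read options land in a CsvFragmentScanOptions default:
        convert_options=csv.ConvertOptions(strings_can_be_null=True),
    )
    dataset = ds.dataset("data/", format=fmt)  # hypothetical directory
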
-
-cdef class CsvFragmentScanOptions(FragmentScanOptions):
-    """Scan-specific options for CSV fragments."""
-
-    cdef:
-        CCsvFragmentScanOptions* csv_options
-
-    # Avoid mistakenly creating attributes
-    __slots__ = ()
-
-    def __init__(self, ConvertOptions convert_options=None,
-                 ReadOptions read_options=None):
-        self.init(shared_ptr[CFragmentScanOptions](
-            new CCsvFragmentScanOptions()))
-        if convert_options is not None:
-            self.convert_options = convert_options
-        if read_options is not None:
-            self.read_options = read_options
-
-    cdef void init(self, const shared_ptr[CFragmentScanOptions]& sp):
-        FragmentScanOptions.init(self, sp)
-        self.csv_options = <CCsvFragmentScanOptions*> sp.get()
-
-    @property
-    def convert_options(self):
-        return ConvertOptions.wrap(self.csv_options.convert_options)
-
-    @convert_options.setter
-    def convert_options(self, ConvertOptions convert_options not None):
-        self.csv_options.convert_options = convert_options.options
-
-    @property
-    def read_options(self):
-        return ReadOptions.wrap(self.csv_options.read_options)
-
-    @read_options.setter
-    def read_options(self, ReadOptions read_options not None):
-        self.csv_options.read_options = read_options.options
-
-    def equals(self, CsvFragmentScanOptions other):
-        return (
-            other and
-            self.convert_options.equals(other.convert_options) and
-            self.read_options.equals(other.read_options))
-
-    def __reduce__(self):
-        return CsvFragmentScanOptions, (self.convert_options,
-                                        self.read_options)
-
-
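For context, a minimal sketch of how the two CSV classes above were typically driven from Python before this removal (assuming a pyarrow build with the csv and dataset modules; the option values are illustrative):

    import pyarrow.csv as csv
    import pyarrow.dataset as ds

    parse_options = csv.ParseOptions(delimiter=";")
    convert_options = csv.ConvertOptions(strings_can_be_null=True)

    # Convert/read options may be passed to CsvFileFormat directly ...
    fmt = ds.CsvFileFormat(parse_options=parse_options,
                           convert_options=convert_options)

    # ... or bundled into CsvFragmentScanOptions first.
    scan_opts = ds.CsvFragmentScanOptions(convert_options=convert_options)
    fmt = ds.CsvFileFormat(parse_options=parse_options,
                           default_fragment_scan_options=scan_opts)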
-cdef class Partitioning(_Weakrefable):
-
-    cdef:
-        shared_ptr[CPartitioning] wrapped
-        CPartitioning* partitioning
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef init(self, const shared_ptr[CPartitioning]& sp):
-        self.wrapped = sp
-        self.partitioning = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CPartitioning]& sp):
-        type_name = frombytes(sp.get().type_name())
-
-        classes = {
-            'schema': DirectoryPartitioning,
-            'hive': HivePartitioning,
-        }
-
-        class_ = classes.get(type_name, None)
-        if class_ is None:
-            raise TypeError(type_name)
-
-        cdef Partitioning self = class_.__new__(class_)
-        self.init(sp)
-        return self
-
-    cdef inline shared_ptr[CPartitioning] unwrap(self):
-        return self.wrapped
-
-    def parse(self, path):
-        cdef CResult[CExpression] result
-        result = self.partitioning.Parse(tobytes(path))
-        return Expression.wrap(GetResultValue(result))
-
-    @property
-    def schema(self):
-        """The arrow Schema attached to the partitioning."""
-        return pyarrow_wrap_schema(self.partitioning.schema())
-
-
-cdef class PartitioningFactory(_Weakrefable):
-
-    cdef:
-        shared_ptr[CPartitioningFactory] wrapped
-        CPartitioningFactory* factory
-
-    def __init__(self):
-        _forbid_instantiation(self.__class__)
-
-    cdef init(self, const shared_ptr[CPartitioningFactory]& sp):
-        self.wrapped = sp
-        self.factory = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CPartitioningFactory]& sp):
-        cdef PartitioningFactory self = PartitioningFactory.__new__(
-            PartitioningFactory
-        )
-        self.init(sp)
-        return self
-
-    cdef inline shared_ptr[CPartitioningFactory] unwrap(self):
-        return self.wrapped
-
-
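A short usage sketch for the wrappers above: concrete Partitioning objects are normally obtained through the ds.partitioning() helper, and Partitioning.parse() turns a path into a filter Expression (the schema and flavor here are illustrative):

    import pyarrow as pa
    import pyarrow.dataset as ds

    part = ds.partitioning(pa.schema([("year", pa.int16()),
                                      ("month", pa.int8())]),
                           flavor="hive")
    expr = part.parse("/year=2009/month=11")  # ((year == 2009) and (month == 11))
    print(part.schema)                        # year: int16, month: int8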
-cdef vector[shared_ptr[CArray]] _partitioning_dictionaries(
-        Schema schema, dictionaries) except *:
-    cdef:
-        vector[shared_ptr[CArray]] c_dictionaries
-
-    dictionaries = dictionaries or {}
-
-    for field in schema:
-        dictionary = dictionaries.get(field.name)
-
-        if (isinstance(field.type, pa.DictionaryType) and
-                dictionary is not None):
-            c_dictionaries.push_back(pyarrow_unwrap_array(dictionary))
-        else:
-            c_dictionaries.push_back(<shared_ptr[CArray]> nullptr)
-
-    return c_dictionaries
-
-
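The helper above lines up one dictionary array per schema field. On the user's side that corresponds to the `dictionaries` argument of the partitioning constructors; a hypothetical example with a dictionary-typed partition field:

    import pyarrow as pa
    import pyarrow.dataset as ds

    # Every value the "color" field may take has to be listed up front,
    # or parsing a path with an unlisted value raises an error.
    schema = pa.schema([("color", pa.dictionary(pa.int32(), pa.string()))])
    partitioning = ds.DirectoryPartitioning(
        schema, dictionaries={"color": pa.array(["red", "green", "blue"])})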
-cdef class DirectoryPartitioning(Partitioning):
-    """
-    A Partitioning based on a specified Schema.
-
-    The DirectoryPartitioning expects one segment in the file path for each
-    field in the schema (all fields are required to be present).
-    For example, given schema<year:int16, month:int8>, the path "/2009/11"
-    would be parsed to ("year"_ == 2009 and "month"_ == 11).
-
-    Parameters
-    ----------
-    schema : Schema
-        The schema that describes the partitions present in the file path.
-    dictionaries : Dict[str, Array]
-        If the type of any field of `schema` is a dictionary type, the
-        corresponding entry of `dictionaries` must be an array containing
-        every value which may be taken by the corresponding column or an
-        error will be raised in parsing.
-
-    Returns
-    -------
-    DirectoryPartitioning
-
-    Examples
-    --------
-    >>> from pyarrow.dataset import DirectoryPartitioning
-    >>> partitioning = DirectoryPartitioning(
-    ...     pa.schema([("year", pa.int16()), ("month", pa.int8())]))
-    >>> print(partitioning.parse("/2009/11"))
-    ((year == 2009:int16) and (month == 11:int8))
-    """
-
-    cdef:
-        CDirectoryPartitioning* directory_partitioning
-
-    def __init__(self, Schema schema not None, dictionaries=None):
-        cdef:
-            shared_ptr[CDirectoryPartitioning] c_partitioning
-
-        c_partitioning = make_shared[CDirectoryPartitioning](
-            pyarrow_unwrap_schema(schema),
-            _partitioning_dictionaries(schema, dictionaries)
-        )
-        self.init(<shared_ptr[CPartitioning]> c_partitioning)
-
-    cdef init(self, const shared_ptr[CPartitioning]& sp):
-        Partitioning.init(self, sp)
-        self.directory_partitioning = <CDirectoryPartitioning*> sp.get()
-
-    @staticmethod
-    def discover(field_names=None, infer_dictionary=False,
-                 max_partition_dictionary_size=0,
-                 schema=None):
-        """
-        Discover a DirectoryPartitioning.
-
-        Parameters
-        ----------
-        field_names : list of str
-            The names to associate with the values from the subdirectory names.
-            If schema is given, will be populated from the schema.
-        infer_dictionary : bool, default False
-            When inferring a schema for partition fields, yield dictionary
-            encoded types instead of plain types. This can be more efficient
-            when materializing virtual columns, and Expressions parsed by the
-            finished Partitioning will include dictionaries of all unique
-            inspected values for each field.
-        max_partition_dictionary_size : int, default 0
-            Synonymous with infer_dictionary for backwards compatibility with
-            1.0: setting this to -1 or None is equivalent to passing
-            infer_dictionary=True.
-        schema : Schema, default None
-            Use this schema instead of inferring a schema from partition
-            values. Partition values will be validated against this schema
-            before accumulation into the Partitioning's dictionary.
-
-        Returns
-        -------
-        PartitioningFactory
-            To be used in the FileSystemFactoryOptions.
-        """
-        cdef:
-            CPartitioningFactoryOptions c_options
-            vector[c_string] c_field_names
-
-        if max_partition_dictionary_size in {-1, None}:
-            infer_dictionary = True
-        elif max_partition_dictionary_size != 0:
-            raise NotImplemented("max_partition_dictionary_size must be "
-                                 "0, -1, or None")
-
-        if infer_dictionary:
-            c_options.infer_dictionary = True
-
-        if schema:
-            c_options.schema = pyarrow_unwrap_schema(schema)
-            c_field_names = [tobytes(f.name) for f in schema]
-        elif not field_names:
-            raise ValueError(
-                "Neither field_names nor schema was passed; "
-                "cannot infer field_names")
-        else:
-            c_field_names = [tobytes(s) for s in field_names]
-        return PartitioningFactory.wrap(
-            CDirectoryPartitioning.MakeFactory(c_field_names, c_options))
-
-
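A sketch of the discovery path described in the docstring above (the directory layout and field names are assumptions):

    import pyarrow.dataset as ds

    # Infer the partition schema from /<year>/<month> subdirectory names.
    factory = ds.DirectoryPartitioning.discover(["year", "month"],
                                                infer_dictionary=True)
    dataset = ds.dataset("/data/table", format="parquet",
                         partitioning=factory)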
-cdef class HivePartitioning(Partitioning):
-    """
-    A Partitioning for "/$key=$value/" nested directories as found in
-    Apache Hive.
-
-    Multi-level, directory based partitioning scheme originating from
-    Apache Hive with all data files stored in the leaf directories. Data is
-    partitioned by static values of a particular column in the schema.
-    Partition keys are represented in the form $key=$value in directory names.
-    Field order is ignored, as are missing or unrecognized field names.
-
-    For example, given schema<year:int16, month:int8, day:int8>, a possible
-    path would be "/year=2009/month=11/day=15".
-
-    Parameters
-    ----------
-    schema : Schema
-        The schema that describes the partitions present in the file path.
-    dictionaries : Dict[str, Array]
-        If the type of any field of `schema` is a dictionary type, the
-        corresponding entry of `dictionaries` must be an array containing
-        every value which may be taken by the corresponding column or an
-        error will be raised in parsing.
-    null_fallback : str, default "__HIVE_DEFAULT_PARTITION__"
-        If any field is None, this fallback will be used as the label.
-
-    Returns
-    -------
-    HivePartitioning
-
-    Examples
-    --------
-    >>> from pyarrow.dataset import HivePartitioning
-    >>> partitioning = HivePartitioning(
-    ...     pa.schema([("year", pa.int16()), ("month", pa.int8())]))
-    >>> print(partitioning.parse("/year=2009/month=11"))
-    ((year == 2009:int16) and (month == 11:int8))
-
-    """
-
-    cdef:
-        CHivePartitioning* hive_partitioning
-
-    def __init__(self,
-                 Schema schema not None,
-                 dictionaries=None,
-                 null_fallback="__HIVE_DEFAULT_PARTITION__"):
-
-        cdef:
-            shared_ptr[CHivePartitioning] c_partitioning
-            c_string c_null_fallback = tobytes(null_fallback)
-
-        c_partitioning = make_shared[CHivePartitioning](
-            pyarrow_unwrap_schema(schema),
-            _partitioning_dictionaries(schema, dictionaries),
-            c_null_fallback
-        )
-        self.init(<shared_ptr[CPartitioning]> c_partitioning)
-
-    cdef init(self, const shared_ptr[CPartitioning]& sp):
-        Partitioning.init(self, sp)
-        self.hive_partitioning = <CHivePartitioning*> sp.get()
-
-    @staticmethod
-    def discover(infer_dictionary=False,
-                 max_partition_dictionary_size=0,
-                 null_fallback="__HIVE_DEFAULT_PARTITION__",
-                 schema=None):
-        """
-        Discover a HivePartitioning.
-
-        Parameters
-        ----------
-        infer_dictionary : bool, default False
-            When inferring a schema for partition fields, yield dictionary
-            encoded types instead of plain. This can be more efficient when
-            materializing virtual columns, and Expressions parsed by the
-            finished Partitioning will include dictionaries of all unique
-            inspected values for each field.
-        max_partition_dictionary_size : int, default 0
-            Synonymous with infer_dictionary for backwards compatibility with
-            1.0: setting this to -1 or None is equivalent to passing
-            infer_dictionary=True.
-        null_fallback : str, default "__HIVE_DEFAULT_PARTITION__"
-            When inferring a schema for partition fields, this value will be
-            replaced by null. The default is __HIVE_DEFAULT_PARTITION__ for
-            compatibility with Spark.
-        schema : Schema, default None
-            Use this schema instead of inferring a schema from partition
-            values. Partition values will be validated against this schema
-            before accumulation into the Partitioning's dictionary.
-
-        Returns
-        -------
-        PartitioningFactory
-            To be used in the FileSystemFactoryOptions.
-        """
-        cdef:
-            CHivePartitioningFactoryOptions c_options
-
-        if max_partition_dictionary_size in {-1, None}:
-            infer_dictionary = True
-        elif max_partition_dictionary_size != 0:
-            raise NotImplemented("max_partition_dictionary_size must be "
-                                 "0, -1, or None")
-
-        if infer_dictionary:
-            c_options.infer_dictionary = True
-
-        c_options.null_fallback = tobytes(null_fallback)
-
-        if schema:
-            c_options.schema = pyarrow_unwrap_schema(schema)
-
-        return PartitioningFactory.wrap(
-            CHivePartitioning.MakeFactory(c_options))
-
-
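And the equivalent flow for Hive-style layouts, again as a sketch with an assumed path and schema:

    import pyarrow as pa
    import pyarrow.dataset as ds

    factory = ds.HivePartitioning.discover(
        schema=pa.schema([("year", pa.int16()), ("month", pa.int8())]))
    dataset = ds.dataset("/warehouse/events", partitioning=factory)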
-cdef class DatasetFactory(_Weakrefable):
-    """
-    DatasetFactory is used to create a Dataset, inspect the Schema
-    of the fragments contained in it, and declare a partitioning.
-    """
-
-    cdef:
-        shared_ptr[CDatasetFactory] wrapped
-        CDatasetFactory* factory
-
-    def __init__(self, list children):
-        _forbid_instantiation(self.__class__)
-
-    cdef init(self, const shared_ptr[CDatasetFactory]& sp):
-        self.wrapped = sp
-        self.factory = sp.get()
-
-    @staticmethod
-    cdef wrap(const shared_ptr[CDatasetFactory]& sp):
-        cdef DatasetFactory self = \
-            DatasetFactory.__new__(DatasetFactory)
-        self.init(sp)
-        return self
-
-    cdef inline shared_ptr[CDatasetFactory] unwrap(self) nogil:
-        return self.wrapped
-
-    @property
-    def root_partition(self):
-        return Expression.wrap(self.factory.root_partition())
-
-    @root_partition.setter
-    def root_partition(self, Expression expr):
-        check_status(self.factory.SetRootPartition(expr.unwrap()))
-
-    def inspect_schemas(self):
-        cdef CResult[vector[shared_ptr[CSchema]]] result
-        cdef CInspectOptions options
-        with nogil:
-            result = self.factory.InspectSchemas(options)
-
-        schemas = []
-        for s in GetResultValue(result):
-            schemas.append(pyarrow_wrap_schema(s))
-        return schemas
-
-    def inspect(self):
-        """
-        Inspect all data fragments and return a common Schema.
-
-        Returns
-        -------
-        Schema
-        """
-        cdef:
-            CInspectOptions options
-            CResult[shared_ptr[CSchema]] result
-        with nogil:
-            result = self.factory.Inspect(options)
-        return pyarrow_wrap_schema(GetResultValue(result))
-
-    def finish(self, Schema schema=None):
-        """
-        Create a Dataset using the inspected schema or an explicit schema
-        (if given).
-
-        Parameters
-        ----------
-        schema : Schema, default None
-            The schema to conform the source to.  If None, the inspected
-            schema is used.
-
-        Returns
-        -------
-        Dataset
-        """
-        cdef:
-            shared_ptr[CSchema] sp_schema
-            CResult[shared_ptr[CDataset]] result
-
-        if schema is not None:
-            sp_schema = pyarrow_unwrap_schema(schema)
-            with nogil:
-                result = self.factory.FinishWithSchema(sp_schema)
-        else:
-            with nogil:
-                result = self.factory.Finish()
-
-        return Dataset.wrap(GetResultValue(result))
-
-
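The inspect()/finish() split above lets schema problems surface before any Dataset is materialized. A minimal sketch using the FileSystemDatasetFactory defined further below (the file path is hypothetical):

    import pyarrow.dataset as ds
    import pyarrow.fs as fs

    factory = ds.FileSystemDatasetFactory(
        fs.LocalFileSystem(), ["data/part-0.parquet"],
        ds.ParquetFileFormat())
    print(factory.inspect())    # common schema across the fragments
    dataset = factory.finish()  # or factory.finish(schema) to override it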
-cdef class FileSystemFactoryOptions(_Weakrefable):
-    """
-    Influences the discovery of filesystem paths.
-
-    Parameters
-    ----------
-    partition_base_dir : str, optional
-        For the purposes of applying the partitioning, paths will be
-        stripped of the partition_base_dir. Files not matching the
-        partition_base_dir prefix will be skipped for partitioning discovery.
-        The ignored files will still be part of the Dataset, but will not
-        have partition information.
-    partitioning : Partitioning or PartitioningFactory, optional
-        Apply the Partitioning to every discovered Fragment. See the
-        Partitioning or PartitioningFactory documentation.
-    exclude_invalid_files : bool, optional (default True)
-        If True, invalid files will be excluded (file format specific check).
-        This will incur IO for each file in a serial and single-threaded
-        fashion. Disabling this feature will skip the IO, but unsupported
-        files may be present in the Dataset (resulting in an error at scan
-        time).
-    selector_ignore_prefixes : list, optional
-        When discovering from a Selector (and not from an explicit file list),
-        ignore files and directories matching any of these prefixes.
-        By default this is ['.', '_'].
-    """
-
-    cdef:
-        CFileSystemFactoryOptions options
-
-    __slots__ = ()  # avoid mistakenly creating attributes
-
-    def __init__(self, partition_base_dir=None, partitioning=None,
-                 exclude_invalid_files=None,
-                 list selector_ignore_prefixes=None):
-        if isinstance(partitioning, PartitioningFactory):
-            self.partitioning_factory = partitioning
-        elif isinstance(partitioning, Partitioning):
-            self.partitioning = partitioning
-
-        if partition_base_dir is not None:
-            self.partition_base_dir = partition_base_dir
-        if exclude_invalid_files is not None:
-            self.exclude_invalid_files = exclude_invalid_files
-        if selector_ignore_prefixes is not None:
-            self.selector_ignore_prefixes = selector_ignore_prefixes
-
-    cdef inline CFileSystemFactoryOptions unwrap(self):
-        return self.options
-
-    @property
-    def partitioning(self):
-        """Partitioning to apply to discovered files.
-
-        NOTE: setting this property will overwrite partitioning_factory.
-        """
-        c_partitioning = self.options.partitioning.partitioning()
-        if c_partitioning.get() == nullptr:
-            return None
-        return Partitioning.wrap(c_partitioning)
-
-    @partitioning.setter
-    def partitioning(self, Partitioning value):
-        self.options.partitioning = (<Partitioning> value).unwrap()
-
-    @property
-    def partitioning_factory(self):
-        """PartitioningFactory to apply to discovered files and
-        discover a Partitioning.
-
-        NOTE: setting this property will overwrite partitioning.
-        """
-        c_factory = self.options.partitioning.factory()
-        if c_factory.get() == nullptr:
-            return None
-        return PartitioningFactory.wrap(c_factory)
-
-    @partitioning_factory.setter
-    def partitioning_factory(self, PartitioningFactory value):
-        self.options.partitioning = (<PartitioningFactory> value).unwrap()
-
-    @property
-    def partition_base_dir(self):
-        """
-        Base directory to strip paths before applying the partitioning.
-        """
-        return frombytes(self.options.partition_base_dir)
-
-    @partition_base_dir.setter
-    def partition_base_dir(self, value):
-        self.options.partition_base_dir = tobytes(value)
-
-    @property
-    def exclude_invalid_files(self):
-        """Whether to exclude invalid files."""
-        return self.options.exclude_invalid_files
-
-    @exclude_invalid_files.setter
-    def exclude_invalid_files(self, bint value):
-        self.options.exclude_invalid_files = value
-
-    @property
-    def selector_ignore_prefixes(self):
-        """
-        List of prefixes. Files matching one of those prefixes will be
-        ignored by the discovery process.
-        """
-        return [frombytes(p) for p in self.options.selector_ignore_prefixes]
-
-    @selector_ignore_prefixes.setter
-    def selector_ignore_prefixes(self, values):
-        self.options.selector_ignore_prefixes = [tobytes(v) for v in values]
-
-
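Putting the options above together, a hedged example of a fully specified options object (all values are illustrative):

    import pyarrow.dataset as ds

    options = ds.FileSystemFactoryOptions(
        partition_base_dir="data",
        partitioning=ds.HivePartitioning.discover(),
        exclude_invalid_files=False,
        selector_ignore_prefixes=[".", "_"])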
-cdef class FileSystemDatasetFactory(DatasetFactory):
-    """
-    Create a DatasetFactory from a list of paths with schema inspection.
-
-    Parameters
-    ----------
-    filesystem : pyarrow.fs.FileSystem
-        Filesystem to discover.
-    paths_or_selector : pyarrow.fs.FileSelector or list of path-likes
-        Either a FileSelector object or a list of path-like objects.
-    format : FileFormat
-        Currently only ParquetFileFormat and IpcFileFormat are supported.
-    options : FileSystemFactoryOptions, optional
-        Various flags influencing the discovery of filesystem paths.
-    """
-
-    cdef:
-        CFileSystemDatasetFactory* filesystem_factory
-
-    def __init__(self, FileSystem filesystem not None, paths_or_selector,
-                 FileFormat format not None,
-                 FileSystemFactoryOptions options=None):
-        cdef:
-            vector[c_string] paths
-            CFileSelector c_selector
-            CResult[shared_ptr[CDatasetFactory]] result
-            shared_ptr[CFileSystem] c_filesystem
-            shared_ptr[CFileFormat] c_format
-            CFileSystemFactoryOptions c_options
-
-        options = options or FileSystemFactoryOptions()
-        c_options = options.unwrap()
-        c_filesystem = filesystem.unwrap()
-        c_format = format.unwrap()
-
-        if isinstance(paths_or_selector, FileSelector):
-            with nogil:
-                c_selector = (<FileSelector> paths_or_selector).selector
-                result = CFileSystemDatasetFactory.MakeFromSelector(
-                    c_filesystem,
-                    c_selector,
-                    c_format,
-                    c_options
-                )
-        elif isinstance(paths_or_selector, (list, tuple)):
-            paths = [tobytes(s) for s in paths_or_selector]
-            with nogil:
-                result = CFileSystemDatasetFactory.MakeFromPaths(
-                    c_filesystem,
-                    paths,
-                    c_format,
-                    c_options
-                )
-        else:
-            raise TypeError('Must pass either paths or a FileSelector, but '
-                            'passed {}'.format(type(paths_or_selector)))
-
-        self.init(GetResultValue(result))
-
-    cdef init(self, shared_ptr[CDatasetFactory]& sp):
-        DatasetFactory.init(self, sp)
-        self.filesystem_factory = <CFileSystemDatasetFactory*> sp.get()
-
-
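A sketch of the FileSelector branch of the constructor above, recursively discovering IPC files under an assumed local directory:

    import pyarrow.dataset as ds
    import pyarrow.fs as fs

    selector = fs.FileSelector("data", recursive=True)
    factory = ds.FileSystemDatasetFactory(
        fs.LocalFileSystem(), selector, ds.IpcFileFormat(),
        ds.FileSystemFactoryOptions(selector_ignore_prefixes=[".", "_"]))
    dataset = factory.finish()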
-cdef class UnionDatasetFactory(DatasetFactory):
-    """
-    Provides a way to inspect/discover a Dataset's expected schema before
-    materialization.
-
-    Parameters
-    ----------
-    factories : list of DatasetFactory
-    """
-
-    cdef:
-        CUnionDatasetFactory* union_factory
-
-    def __init__(self, list factories):
-        cdef:
-            DatasetFactory factory
-            vector[shared_ptr[CDatasetFactory]] c_factories
-        for factory in factories:
-            c_factories.push_back(factory.unwrap())
-        self.init(GetResultValue(CUnionDatasetFactory.Make(c_factories)))
-
-    cdef init(self, const shared_ptr[CDatasetFactory]& sp):
-        DatasetFactory.init(self, sp)
-        self.union_factory = <CUnionDatasetFactory*> sp.get()
-
-
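For completeness, a sketch of combining two child factories into one dataset (the paths are hypothetical):

    import pyarrow.dataset as ds
    import pyarrow.fs as fs

    local = fs.LocalFileSystem()
    fmt = ds.ParquetFileFormat()
    factory_a = ds.FileSystemDatasetFactory(local, ["a/part-0.parquet"], fmt)
    factory_b = ds.FileSystemDatasetFactory(local, ["b/part-0.parquet"], fmt)

    union = ds.UnionDatasetFactory([factory_a, factory_b])
    print(union.inspect_schemas())  # schemas gathered from the children
    dataset = union.finish()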
-cdef class ParquetFactoryOptions(_Weakrefable):
-    """
-    Influences the discovery of a parquet dataset.
-
-    Parameters
-    ----------
-    partition_base_dir : str, optional
-        For the purposes of applying the partitioning, paths will be
-        stripped of the partition_base_dir. Files not matching the
-        partition_base_dir prefix will be skipped for partitioning discovery.
-        The ignored files will still be part of the Dataset, but will not
-        have partition information.
-    partitioning : Partitioning, PartitioningFactory, optional
-        The partitioning scheme applied to fragments, see ``Partitioning``.
-    validate_column_chunk_paths : bool, default False
-        Assert that all ColumnChunk paths are consistent. The parquet spec
-        allows for ColumnChunk data to be stored in multiple files, but
-        ParquetDatasetFactory supports only a single file with all ColumnChunk
-        data. If this flag is set construction of a ParquetDatasetFactory will
-        raise an error if ColumnChunk data is not resident in a single file.
-    """
-
-    cdef:
-        CParquetFactoryOptions options
-
-    __slots__ = ()  # avoid mistakenly creating attributes
-
-    def __init__(self, partition_base_dir=None, partitioning=None,
-                 validate_column_chunk_paths=False):
-        if isinstance(partitioning, PartitioningFactory):
-            self.partitioning_factory = partitioning
-        elif isinstance(partitioning, Partitioning):
-            self.partitioning = partitioning
-
-        if partition_base_dir is not None:
-            self.partition_base_dir = partition_base_dir
-
-        self.options.validate_column_chunk_paths = validate_column_chunk_paths
-
-    cdef inline CParquetFactoryOptions unwrap(self):
-        return self.options
-
-    @property
-    def partitioning(self):
-        """Partitioning to apply to discovered files.
-
-        NOTE: setting this property will overwrite partitioning_factory.
-        """
-        c_partitioning = self.options.partitioning.partitioning()
-        if c_partitioning.get() == nullptr:
... 78503 lines suppressed ...

[arrow-rs] 07/14: Removed Java.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit f864f413edf36a0ee0320986047008ded09fabed
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:20:46 2021 +0000

    Removed Java.
---
 java/.gitattributes                                |    2 -
 java/.gitignore                                    |   23 -
 java/README.md                                     |  164 --
 java/adapter/avro/pom.xml                          |   59 -
 .../main/java/org/apache/arrow/AvroToArrow.java    |   67 -
 .../java/org/apache/arrow/AvroToArrowConfig.java   |   86 -
 .../org/apache/arrow/AvroToArrowConfigBuilder.java |   74 -
 .../java/org/apache/arrow/AvroToArrowUtils.java    |  805 ------
 .../apache/arrow/AvroToArrowVectorIterator.java    |  186 --
 .../apache/arrow/consumers/AvroArraysConsumer.java |   74 -
 .../arrow/consumers/AvroBooleanConsumer.java       |   43 -
 .../apache/arrow/consumers/AvroBytesConsumer.java  |   49 -
 .../apache/arrow/consumers/AvroDoubleConsumer.java |   42 -
 .../apache/arrow/consumers/AvroEnumConsumer.java   |   43 -
 .../apache/arrow/consumers/AvroFixedConsumer.java  |   46 -
 .../apache/arrow/consumers/AvroFloatConsumer.java  |   42 -
 .../apache/arrow/consumers/AvroIntConsumer.java    |   42 -
 .../apache/arrow/consumers/AvroLongConsumer.java   |   42 -
 .../apache/arrow/consumers/AvroMapConsumer.java    |   79 -
 .../apache/arrow/consumers/AvroNullConsumer.java   |   39 -
 .../apache/arrow/consumers/AvroStringConsumer.java |   48 -
 .../apache/arrow/consumers/AvroStructConsumer.java |   76 -
 .../apache/arrow/consumers/AvroUnionsConsumer.java |   86 -
 .../apache/arrow/consumers/BaseAvroConsumer.java   |   65 -
 .../arrow/consumers/CompositeAvroConsumer.java     |   73 -
 .../java/org/apache/arrow/consumers/Consumer.java  |   71 -
 .../org/apache/arrow/consumers/SkipConsumer.java   |   67 -
 .../org/apache/arrow/consumers/SkipFunction.java   |   30 -
 .../arrow/consumers/logical/AvroDateConsumer.java  |   43 -
 .../consumers/logical/AvroDecimalConsumer.java     |   88 -
 .../consumers/logical/AvroTimeMicroConsumer.java   |   43 -
 .../consumers/logical/AvroTimeMillisConsumer.java  |   43 -
 .../logical/AvroTimestampMicrosConsumer.java       |   43 -
 .../logical/AvroTimestampMillisConsumer.java       |   43 -
 .../org/apache/arrow/AvroLogicalTypesTest.java     |  201 --
 .../java/org/apache/arrow/AvroSkipFieldTest.java   |  626 ----
 .../test/java/org/apache/arrow/AvroTestBase.java   |  229 --
 .../org/apache/arrow/AvroToArrowIteratorTest.java  |  313 --
 .../java/org/apache/arrow/AvroToArrowTest.java     |  477 ---
 .../org/apache/arrow/TestWriteReadAvroRecord.java  |   93 -
 .../resources/schema/attrs/test_enum_attrs.avsc    |   24 -
 .../resources/schema/attrs/test_fixed_attr.avsc    |   24 -
 .../resources/schema/attrs/test_record_attrs.avsc  |   37 -
 .../test/resources/schema/logical/test_date.avsc   |   23 -
 .../schema/logical/test_decimal_invalid1.avsc      |   25 -
 .../schema/logical/test_decimal_invalid2.avsc      |   25 -
 .../schema/logical/test_decimal_invalid3.avsc      |   25 -
 .../schema/logical/test_decimal_invalid4.avsc      |   26 -
 .../logical/test_decimal_with_original_bytes.avsc  |   25 -
 .../logical/test_decimal_with_original_fixed.avsc  |   26 -
 .../resources/schema/logical/test_time_micros.avsc |   23 -
 .../resources/schema/logical/test_time_millis.avsc |   23 -
 .../schema/logical/test_timestamp_micros.avsc      |   23 -
 .../schema/logical/test_timestamp_millis.avsc      |   23 -
 .../schema/skip/test_skip_array_before.avsc        |   27 -
 .../schema/skip/test_skip_array_expected.avsc      |   26 -
 .../resources/schema/skip/test_skip_base1.avsc     |   28 -
 .../resources/schema/skip/test_skip_base2.avsc     |   29 -
 .../schema/skip/test_skip_boolean_expected.avsc    |   28 -
 .../schema/skip/test_skip_bytes_expected.avsc      |   27 -
 .../schema/skip/test_skip_double_expected.avsc     |   28 -
 .../schema/skip/test_skip_enum_expected.avsc       |   27 -
 .../schema/skip/test_skip_fixed_expected.avsc      |   27 -
 .../schema/skip/test_skip_float_expected.avsc      |   28 -
 .../schema/skip/test_skip_int_expected.avsc        |   28 -
 .../schema/skip/test_skip_long_expected.avsc       |   28 -
 .../schema/skip/test_skip_map_before.avsc          |   27 -
 .../schema/skip/test_skip_map_expected.avsc        |   26 -
 .../skip/test_skip_multi_fields_expected.avsc      |   25 -
 .../schema/skip/test_skip_record_before.avsc       |   38 -
 .../schema/skip/test_skip_record_expected.avsc     |   25 -
 .../skip/test_skip_second_level_expected.avsc      |   34 -
 .../skip/test_skip_single_field_expected.avsc      |   26 -
 .../schema/skip/test_skip_string_expected.avsc     |   27 -
 .../skip/test_skip_third_level_expected.avsc       |   45 -
 .../schema/skip/test_skip_union_before.avsc        |   28 -
 .../test_skip_union_multi_fields_expected.avsc     |   27 -
 .../test_skip_union_nullable_field_expected.avsc   |   27 -
 .../skip/test_skip_union_one_field_expected.avsc   |   27 -
 .../avro/src/test/resources/schema/test.avsc       |   27 -
 .../avro/src/test/resources/schema/test_array.avsc |   23 -
 .../avro/src/test/resources/schema/test_fixed.avsc |   23 -
 .../src/test/resources/schema/test_large_data.avsc |   75 -
 .../avro/src/test/resources/schema/test_map.avsc   |   23 -
 .../test/resources/schema/test_nested_record.avsc  |   35 -
 .../resources/schema/test_nullable_boolean.avsc    |   25 -
 .../test/resources/schema/test_nullable_bytes.avsc |   25 -
 .../resources/schema/test_nullable_double.avsc     |   25 -
 .../test/resources/schema/test_nullable_float.avsc |   25 -
 .../test/resources/schema/test_nullable_int.avsc   |   25 -
 .../test/resources/schema/test_nullable_long.avsc  |   25 -
 .../resources/schema/test_nullable_string.avsc     |   25 -
 .../test/resources/schema/test_nullable_union.avsc |   25 -
 .../resources/schema/test_primitive_boolean.avsc   |   22 -
 .../resources/schema/test_primitive_bytes.avsc     |   22 -
 .../resources/schema/test_primitive_double.avsc    |   22 -
 .../test/resources/schema/test_primitive_enum.avsc |   23 -
 .../resources/schema/test_primitive_float.avsc     |   22 -
 .../test/resources/schema/test_primitive_int.avsc  |   22 -
 .../test/resources/schema/test_primitive_long.avsc |   22 -
 .../resources/schema/test_primitive_string.avsc    |   22 -
 .../src/test/resources/schema/test_record.avsc     |   27 -
 .../avro/src/test/resources/schema/test_union.avsc |   25 -
 java/adapter/jdbc/pom.xml                          |  109 -
 .../arrow/adapter/jdbc/ArrowVectorIterator.java    |  186 --
 .../org/apache/arrow/adapter/jdbc/Constants.java   |   31 -
 .../apache/arrow/adapter/jdbc/JdbcFieldInfo.java   |  114 -
 .../org/apache/arrow/adapter/jdbc/JdbcToArrow.java |  273 --
 .../arrow/adapter/jdbc/JdbcToArrowConfig.java      |  280 --
 .../adapter/jdbc/JdbcToArrowConfigBuilder.java     |  192 --
 .../arrow/adapter/jdbc/JdbcToArrowUtils.java       |  336 ---
 .../arrow/adapter/jdbc/consumer/ArrayConsumer.java |  134 -
 .../arrow/adapter/jdbc/consumer/BaseConsumer.java  |   54 -
 .../adapter/jdbc/consumer/BigIntConsumer.java      |   87 -
 .../adapter/jdbc/consumer/BinaryConsumer.java      |  135 -
 .../arrow/adapter/jdbc/consumer/BitConsumer.java   |   87 -
 .../arrow/adapter/jdbc/consumer/BlobConsumer.java  |   72 -
 .../arrow/adapter/jdbc/consumer/ClobConsumer.java  |  161 --
 .../jdbc/consumer/CompositeJdbcConsumer.java       |   76 -
 .../arrow/adapter/jdbc/consumer/DateConsumer.java  |  116 -
 .../adapter/jdbc/consumer/DecimalConsumer.java     |   88 -
 .../adapter/jdbc/consumer/DoubleConsumer.java      |   87 -
 .../arrow/adapter/jdbc/consumer/FloatConsumer.java |   87 -
 .../arrow/adapter/jdbc/consumer/IntConsumer.java   |   87 -
 .../arrow/adapter/jdbc/consumer/JdbcConsumer.java  |   46 -
 .../arrow/adapter/jdbc/consumer/NullConsumer.java  |   38 -
 .../adapter/jdbc/consumer/SmallIntConsumer.java    |   87 -
 .../arrow/adapter/jdbc/consumer/TimeConsumer.java  |  112 -
 .../adapter/jdbc/consumer/TimestampConsumer.java   |   89 -
 .../adapter/jdbc/consumer/TimestampTZConsumer.java |   97 -
 .../adapter/jdbc/consumer/TinyIntConsumer.java     |   87 -
 .../adapter/jdbc/consumer/VarCharConsumer.java     |   86 -
 .../adapter/jdbc/AbstractJdbcToArrowTest.java      |  143 -
 .../arrow/adapter/jdbc/JdbcFieldInfoTest.java      |   45 -
 .../arrow/adapter/jdbc/JdbcToArrowConfigTest.java  |  159 -
 .../arrow/adapter/jdbc/JdbcToArrowTestHelper.java  |  381 ---
 .../java/org/apache/arrow/adapter/jdbc/Table.java  |  234 --
 .../jdbc/consumer/AbstractConsumerTest.java        |   39 -
 .../adapter/jdbc/consumer/BinaryConsumerTest.java  |  117 -
 .../adapter/jdbc/h2/JdbcToArrowArrayTest.java      |  373 ---
 .../adapter/jdbc/h2/JdbcToArrowCharSetTest.java    |  156 -
 .../adapter/jdbc/h2/JdbcToArrowDataTypesTest.java  |  249 --
 .../arrow/adapter/jdbc/h2/JdbcToArrowNullTest.java |  278 --
 .../arrow/adapter/jdbc/h2/JdbcToArrowTest.java     | 1339 ---------
 .../adapter/jdbc/h2/JdbcToArrowTimeZoneTest.java   |  169 --
 .../jdbc/h2/JdbcToArrowVectorIteratorTest.java     |  427 ---
 .../test/resources/h2/test1_all_datatypes_h2.yml   |  121 -
 .../resources/h2/test1_all_datatypes_null_h2.yml   |   51 -
 .../test1_all_datatypes_selected_null_rows_h2.yml  |   83 -
 .../jdbc/src/test/resources/h2/test1_bigint_h2.yml |   46 -
 .../jdbc/src/test/resources/h2/test1_binary_h2.yml |   46 -
 .../jdbc/src/test/resources/h2/test1_bit_h2.yml    |   46 -
 .../jdbc/src/test/resources/h2/test1_blob_h2.yml   |   46 -
 .../jdbc/src/test/resources/h2/test1_bool_h2.yml   |   46 -
 .../jdbc/src/test/resources/h2/test1_char_h2.yml   |   46 -
 .../src/test/resources/h2/test1_charset_ch_h2.yml  |   43 -
 .../src/test/resources/h2/test1_charset_h2.yml     |   53 -
 .../src/test/resources/h2/test1_charset_jp_h2.yml  |   43 -
 .../src/test/resources/h2/test1_charset_kr_h2.yml  |   43 -
 .../jdbc/src/test/resources/h2/test1_clob_h2.yml   |   46 -
 .../jdbc/src/test/resources/h2/test1_date_h2.yml   |   48 -
 .../src/test/resources/h2/test1_decimal_h2.yml     |   46 -
 .../jdbc/src/test/resources/h2/test1_double_h2.yml |   46 -
 .../src/test/resources/h2/test1_est_date_h2.yml    |   48 -
 .../src/test/resources/h2/test1_est_time_h2.yml    |   48 -
 .../test/resources/h2/test1_est_timestamp_h2.yml   |   49 -
 .../src/test/resources/h2/test1_gmt_date_h2.yml    |   48 -
 .../src/test/resources/h2/test1_gmt_time_h2.yml    |   48 -
 .../test/resources/h2/test1_gmt_timestamp_h2.yml   |   48 -
 .../jdbc/src/test/resources/h2/test1_int_h2.yml    |   46 -
 .../jdbc/src/test/resources/h2/test1_null_h2.yml   |   36 -
 .../src/test/resources/h2/test1_pst_date_h2.yml    |   48 -
 .../src/test/resources/h2/test1_pst_time_h2.yml    |   48 -
 .../test/resources/h2/test1_pst_timestamp_h2.yml   |   48 -
 .../jdbc/src/test/resources/h2/test1_real_h2.yml   |   46 -
 .../h2/test1_selected_datatypes_null_h2.yml        |   46 -
 .../src/test/resources/h2/test1_smallint_h2.yml    |   46 -
 .../jdbc/src/test/resources/h2/test1_time_h2.yml   |   46 -
 .../src/test/resources/h2/test1_timestamp_h2.yml   |   46 -
 .../src/test/resources/h2/test1_tinyint_h2.yml     |   46 -
 .../src/test/resources/h2/test1_varchar_h2.yml     |   46 -
 java/adapter/jdbc/src/test/resources/logback.xml   |   28 -
 java/adapter/orc/CMakeLists.txt                    |   43 -
 java/adapter/orc/pom.xml                           |  124 -
 .../org/apache/arrow/adapter/orc/OrcFieldNode.java |   45 -
 .../org/apache/arrow/adapter/orc/OrcJniUtils.java  |   62 -
 .../arrow/adapter/orc/OrcMemoryJniWrapper.java     |   77 -
 .../org/apache/arrow/adapter/orc/OrcReader.java    |   90 -
 .../arrow/adapter/orc/OrcReaderJniWrapper.java     |   79 -
 .../apache/arrow/adapter/orc/OrcRecordBatch.java   |   47 -
 .../arrow/adapter/orc/OrcReferenceManager.java     |  121 -
 .../apache/arrow/adapter/orc/OrcStripeReader.java  |  109 -
 .../adapter/orc/OrcStripeReaderJniWrapper.java     |   45 -
 .../apache/arrow/adapter/orc/OrcReaderTest.java    |  104 -
 java/algorithm/pom.xml                             |   55 -
 .../algorithm/deduplicate/DeduplicationUtils.java  |   96 -
 .../deduplicate/VectorRunDeduplicator.java         |  108 -
 .../algorithm/dictionary/DictionaryBuilder.java    |   72 -
 .../algorithm/dictionary/DictionaryEncoder.java    |   39 -
 .../HashTableBasedDictionaryBuilder.java           |  153 -
 .../dictionary/HashTableDictionaryEncoder.java     |  146 -
 .../dictionary/LinearDictionaryEncoder.java        |  112 -
 .../dictionary/SearchDictionaryEncoder.java        |  100 -
 .../SearchTreeBasedDictionaryBuilder.java          |  146 -
 .../arrow/algorithm/misc/PartialSumUtils.java      |  119 -
 .../apache/arrow/algorithm/rank/VectorRank.java    |   89 -
 .../arrow/algorithm/search/ParallelSearcher.java   |  190 --
 .../algorithm/search/VectorRangeSearcher.java      |  108 -
 .../arrow/algorithm/search/VectorSearcher.java     |   88 -
 .../algorithm/sort/CompositeVectorComparator.java  |   71 -
 .../algorithm/sort/DefaultVectorComparators.java   |  431 ---
 .../sort/FixedWidthInPlaceVectorSorter.java        |  169 --
 .../sort/FixedWidthOutOfPlaceVectorSorter.java     |   80 -
 .../arrow/algorithm/sort/InPlaceVectorSorter.java  |   37 -
 .../apache/arrow/algorithm/sort/IndexSorter.java   |  180 --
 .../arrow/algorithm/sort/InsertionSorter.java      |   74 -
 .../arrow/algorithm/sort/OffHeapIntStack.java      |   72 -
 .../algorithm/sort/OutOfPlaceVectorSorter.java     |   37 -
 .../algorithm/sort/StableVectorComparator.java     |   66 -
 .../sort/VariableWidthOutOfPlaceVectorSorter.java  |   93 -
 .../algorithm/sort/VectorValueComparator.java      |  123 -
 .../deduplicate/TestDeduplicationUtils.java        |  135 -
 .../deduplicate/TestVectorRunDeduplicator.java     |  131 -
 .../TestHashTableBasedDictionaryBuilder.java       |  202 --
 .../dictionary/TestHashTableDictionaryEncoder.java |  350 ---
 .../dictionary/TestLinearDictionaryEncoder.java    |  350 ---
 .../dictionary/TestSearchDictionaryEncoder.java    |  357 ---
 .../TestSearchTreeBasedDictionaryBuilder.java      |  221 --
 .../arrow/algorithm/misc/TestPartialSumUtils.java  |  138 -
 .../arrow/algorithm/rank/TestVectorRank.java       |  145 -
 .../algorithm/search/TestParallelSearcher.java     |  150 -
 .../algorithm/search/TestVectorRangeSearcher.java  |  195 --
 .../arrow/algorithm/search/TestVectorSearcher.java |  299 --
 .../sort/TestCompositeVectorComparator.java        |  112 -
 .../sort/TestDefaultVectorComparator.java          |  393 ---
 .../sort/TestFixedWidthInPlaceVectorSorter.java    |  240 --
 .../sort/TestFixedWidthOutOfPlaceVectorSorter.java |  365 ---
 .../algorithm/sort/TestFixedWidthSorting.java      |  172 --
 .../arrow/algorithm/sort/TestIndexSorter.java      |  205 --
 .../arrow/algorithm/sort/TestInsertionSorter.java  |  117 -
 .../arrow/algorithm/sort/TestOffHeapIntStack.java  |   67 -
 .../arrow/algorithm/sort/TestSortingUtil.java      |  166 --
 .../algorithm/sort/TestStableVectorComparator.java |  137 -
 .../TestVariableWidthOutOfPlaceVectorSorter.java   |   99 -
 .../algorithm/sort/TestVariableWidthSorting.java   |  165 --
 java/api-changes.md                                |   32 -
 java/compression/pom.xml                           |   52 -
 .../compression/CommonsCompressionFactory.java     |   43 -
 .../arrow/compression/Lz4CompressionCodec.java     |   89 -
 .../arrow/compression/ZstdCompressionCodec.java    |   74 -
 .../arrow/compression/TestCompressionCodec.java    |  213 --
 java/dataset/CMakeLists.txt                        |   43 -
 java/dataset/pom.xml                               |  134 -
 .../org/apache/arrow/dataset/file/FileFormat.java  |   36 -
 .../dataset/file/FileSystemDatasetFactory.java     |   38 -
 .../org/apache/arrow/dataset/file/JniWrapper.java  |   47 -
 .../dataset/jni/DirectReservationListener.java     |   97 -
 .../org/apache/arrow/dataset/jni/JniLoader.java    |   94 -
 .../org/apache/arrow/dataset/jni/JniWrapper.java   |  105 -
 .../apache/arrow/dataset/jni/NativeContext.java    |   53 -
 .../apache/arrow/dataset/jni/NativeDataset.java    |   56 -
 .../arrow/dataset/jni/NativeDatasetFactory.java    |  104 -
 .../jni/NativeInstanceReleasedException.java       |   31 -
 .../apache/arrow/dataset/jni/NativeMemoryPool.java |   76 -
 .../arrow/dataset/jni/NativeRecordBatchHandle.java |  106 -
 .../apache/arrow/dataset/jni/NativeScanTask.java   |   46 -
 .../apache/arrow/dataset/jni/NativeScanner.java    |  170 --
 .../arrow/dataset/jni/ReservationListener.java     |   36 -
 .../apache/arrow/dataset/scanner/ScanOptions.java  |   44 -
 .../org/apache/arrow/dataset/scanner/ScanTask.java |   42 -
 .../org/apache/arrow/dataset/scanner/Scanner.java  |   41 -
 .../org/apache/arrow/dataset/source/Dataset.java   |   35 -
 .../arrow/dataset/source/DatasetFactory.java       |   51 -
 .../arrow/memory/NativeUnderlyingMemory.java       |   81 -
 .../apache/arrow/dataset/ParquetWriteSupport.java  |  123 -
 .../java/org/apache/arrow/dataset/TestDataset.java |   97 -
 .../arrow/dataset/file/TestFileSystemDataset.java  |  292 --
 .../dataset/file/TestFileSystemDatasetFactory.java |   48 -
 .../arrow/dataset/jni/TestNativeDataset.java       |   33 -
 .../arrow/dataset/jni/TestReservationListener.java |   88 -
 .../arrow/memory/TestNativeUnderlyingMemory.java   |  110 -
 .../src/test/resources/avroschema/user.avsc        |   26 -
 java/dev/checkstyle/checkstyle.license             |   16 -
 java/dev/checkstyle/checkstyle.xml                 |  280 --
 java/dev/checkstyle/suppressions.xml               |   42 -
 java/flight/flight-core/README.md                  |   95 -
 java/flight/flight-core/pom.xml                    |  392 ---
 .../main/java/org/apache/arrow/flight/Action.java  |   61 -
 .../java/org/apache/arrow/flight/ActionType.java   |   70 -
 .../java/org/apache/arrow/flight/ArrowMessage.java |  560 ----
 .../org/apache/arrow/flight/AsyncPutListener.java  |   72 -
 .../apache/arrow/flight/BackpressureStrategy.java  |  172 --
 .../java/org/apache/arrow/flight/CallHeaders.java  |   65 -
 .../java/org/apache/arrow/flight/CallInfo.java     |   33 -
 .../java/org/apache/arrow/flight/CallOption.java   |   24 -
 .../java/org/apache/arrow/flight/CallOptions.java  |   62 -
 .../java/org/apache/arrow/flight/CallStatus.java   |  143 -
 .../java/org/apache/arrow/flight/Criteria.java     |   58 -
 .../org/apache/arrow/flight/DictionaryUtils.java   |  127 -
 .../apache/arrow/flight/ErrorFlightMetadata.java   |   81 -
 .../apache/arrow/flight/FlightBindingService.java  |  174 --
 .../org/apache/arrow/flight/FlightCallHeaders.java |  111 -
 .../java/org/apache/arrow/flight/FlightClient.java |  721 -----
 .../arrow/flight/FlightClientMiddleware.java       |   52 -
 .../org/apache/arrow/flight/FlightConstants.java   |   29 -
 .../org/apache/arrow/flight/FlightDescriptor.java  |  180 --
 .../org/apache/arrow/flight/FlightEndpoint.java    |  106 -
 .../java/org/apache/arrow/flight/FlightInfo.java   |  208 --
 .../java/org/apache/arrow/flight/FlightMethod.java |   64 -
 .../org/apache/arrow/flight/FlightProducer.java    |  164 --
 .../arrow/flight/FlightRuntimeException.java       |   46 -
 .../java/org/apache/arrow/flight/FlightServer.java |  399 ---
 .../arrow/flight/FlightServerMiddleware.java       |  100 -
 .../org/apache/arrow/flight/FlightService.java     |  427 ---
 .../org/apache/arrow/flight/FlightStatusCode.java  |   82 -
 .../java/org/apache/arrow/flight/FlightStream.java |  497 ----
 .../org/apache/arrow/flight/HeaderCallOption.java  |   52 -
 .../java/org/apache/arrow/flight/Location.java     |  158 -
 .../org/apache/arrow/flight/LocationSchemes.java   |   32 -
 .../apache/arrow/flight/NoOpFlightProducer.java    |   61 -
 .../apache/arrow/flight/NoOpStreamListener.java    |   49 -
 .../arrow/flight/OutboundStreamListener.java       |  123 -
 .../arrow/flight/OutboundStreamListenerImpl.java   |  132 -
 .../java/org/apache/arrow/flight/PutResult.java    |   96 -
 .../org/apache/arrow/flight/RequestContext.java    |   51 -
 .../main/java/org/apache/arrow/flight/Result.java  |   50 -
 .../java/org/apache/arrow/flight/SchemaResult.java |   96 -
 .../arrow/flight/ServerHeaderMiddleware.java       |   65 -
 .../java/org/apache/arrow/flight/StreamPipe.java   |  118 -
 .../org/apache/arrow/flight/SyncPutListener.java   |  122 -
 .../main/java/org/apache/arrow/flight/Ticket.java  |  102 -
 .../apache/arrow/flight/auth/AuthConstants.java    |   51 -
 .../arrow/flight/auth/BasicClientAuthHandler.java  |   58 -
 .../arrow/flight/auth/BasicServerAuthHandler.java  |   74 -
 .../arrow/flight/auth/ClientAuthHandler.java       |   55 -
 .../arrow/flight/auth/ClientAuthInterceptor.java   |   73 -
 .../arrow/flight/auth/ClientAuthWrapper.java       |  162 --
 .../arrow/flight/auth/ServerAuthHandler.java       |   72 -
 .../arrow/flight/auth/ServerAuthInterceptor.java   |   85 -
 .../arrow/flight/auth/ServerAuthWrapper.java       |  144 -
 .../apache/arrow/flight/auth2/Auth2Constants.java  |   31 -
 .../apache/arrow/flight/auth2/AuthUtilities.java   |   47 -
 .../flight/auth2/BasicAuthCredentialWriter.java    |   44 -
 .../flight/auth2/BasicCallHeaderAuthenticator.java |   88 -
 .../arrow/flight/auth2/BearerCredentialWriter.java |   39 -
 .../flight/auth2/BearerTokenAuthenticator.java     |   62 -
 .../flight/auth2/CallHeaderAuthenticator.java      |   86 -
 .../flight/auth2/ClientBearerHeaderHandler.java    |   36 -
 .../arrow/flight/auth2/ClientHandshakeWrapper.java |  100 -
 .../arrow/flight/auth2/ClientHeaderHandler.java    |   43 -
 .../auth2/ClientIncomingAuthHeaderMiddleware.java  |   78 -
 .../auth2/GeneratedBearerTokenAuthenticator.java   |  128 -
 .../auth2/ServerCallHeaderAuthMiddleware.java      |   74 -
 .../flight/client/ClientCookieMiddleware.java      |  130 -
 .../arrow/flight/example/ExampleFlightServer.java  |   93 -
 .../apache/arrow/flight/example/ExampleTicket.java |  141 -
 .../apache/arrow/flight/example/FlightHolder.java  |  131 -
 .../apache/arrow/flight/example/InMemoryStore.java |  176 --
 .../org/apache/arrow/flight/example/Stream.java    |  177 --
 .../integration/AuthBasicProtoScenario.java        |   97 -
 .../example/integration/IntegrationAssertions.java |   74 -
 .../example/integration/IntegrationTestClient.java |  197 --
 .../example/integration/IntegrationTestServer.java |   97 -
 .../example/integration/MiddlewareScenario.java    |  168 --
 .../arrow/flight/example/integration/Scenario.java |   45 -
 .../flight/example/integration/Scenarios.java      |   90 -
 .../arrow/flight/grpc/AddWritableBuffer.java       |  128 -
 .../arrow/flight/grpc/CallCredentialAdapter.java   |   53 -
 .../flight/grpc/ClientInterceptorAdapter.java      |  149 -
 .../grpc/ContextPropagatingExecutorService.java    |  117 -
 .../arrow/flight/grpc/CredentialCallOption.java    |   41 -
 .../arrow/flight/grpc/GetReadableBuffer.java       |   99 -
 .../apache/arrow/flight/grpc/MetadataAdapter.java  |   98 -
 .../arrow/flight/grpc/RequestContextAdapter.java   |   57 -
 .../flight/grpc/ServerInterceptorAdapter.java      |  145 -
 .../org/apache/arrow/flight/grpc/StatusUtils.java  |  230 --
 .../org/apache/arrow/flight/FlightTestUtil.java    |  150 -
 .../arrow/flight/TestApplicationMetadata.java      |  329 ---
 .../java/org/apache/arrow/flight/TestAuth.java     |   93 -
 .../org/apache/arrow/flight/TestBackPressure.java  |  262 --
 .../apache/arrow/flight/TestBasicOperation.java    |  567 ----
 .../org/apache/arrow/flight/TestCallOptions.java   |  191 --
 .../apache/arrow/flight/TestClientMiddleware.java  |  359 ---
 .../apache/arrow/flight/TestDictionaryUtils.java   |   91 -
 .../org/apache/arrow/flight/TestDoExchange.java    |  536 ----
 .../org/apache/arrow/flight/TestErrorMetadata.java |   92 -
 .../org/apache/arrow/flight/TestFlightClient.java  |  225 --
 .../org/apache/arrow/flight/TestFlightService.java |  125 -
 .../org/apache/arrow/flight/TestLargeMessage.java  |  165 --
 .../java/org/apache/arrow/flight/TestLeak.java     |  182 --
 .../apache/arrow/flight/TestMetadataVersion.java   |  319 ---
 .../apache/arrow/flight/TestServerMiddleware.java  |  360 ---
 .../org/apache/arrow/flight/TestServerOptions.java |  176 --
 .../test/java/org/apache/arrow/flight/TestTls.java |  145 -
 .../apache/arrow/flight/auth/TestBasicAuth.java    |  158 -
 .../apache/arrow/flight/auth2/TestBasicAuth2.java  |  232 --
 .../arrow/flight/client/TestCookieHandling.java    |  267 --
 .../arrow/flight/example/TestExampleServer.java    |  117 -
 .../arrow/flight/perf/PerformanceTestServer.java   |  216 --
 .../org/apache/arrow/flight/perf/TestPerf.java     |  199 --
 .../flight-core/src/test/protobuf/perf.proto       |   45 -
 .../flight-core/src/test/resources/logback.xml     |   28 -
 java/flight/flight-grpc/pom.xml                    |  132 -
 .../org/apache/arrow/flight/FlightGrpcUtils.java   |  161 --
 .../apache/arrow/flight/TestFlightGrpcUtils.java   |  193 --
 .../flight-grpc/src/test/protobuf/test.proto       |   26 -
 java/format/pom.xml                                |   46 -
 .../main/java/org/apache/arrow/flatbuf/Binary.java |   51 -
 .../main/java/org/apache/arrow/flatbuf/Block.java  |   61 -
 .../org/apache/arrow/flatbuf/BodyCompression.java  |   72 -
 .../arrow/flatbuf/BodyCompressionMethod.java       |   43 -
 .../main/java/org/apache/arrow/flatbuf/Bool.java   |   48 -
 .../main/java/org/apache/arrow/flatbuf/Buffer.java |   63 -
 .../org/apache/arrow/flatbuf/CompressionType.java  |   30 -
 .../main/java/org/apache/arrow/flatbuf/Date.java   |   65 -
 .../java/org/apache/arrow/flatbuf/DateUnit.java    |   30 -
 .../java/org/apache/arrow/flatbuf/Decimal.java     |   81 -
 .../org/apache/arrow/flatbuf/DictionaryBatch.java  |   79 -
 .../apache/arrow/flatbuf/DictionaryEncoding.java   |   88 -
 .../org/apache/arrow/flatbuf/DictionaryKind.java   |   36 -
 .../java/org/apache/arrow/flatbuf/Duration.java    |   57 -
 .../java/org/apache/arrow/flatbuf/Endianness.java  |   34 -
 .../java/org/apache/arrow/flatbuf/Feature.java     |   62 -
 .../main/java/org/apache/arrow/flatbuf/Field.java  |  120 -
 .../java/org/apache/arrow/flatbuf/FieldNode.java   |   68 -
 .../org/apache/arrow/flatbuf/FixedSizeBinary.java  |   60 -
 .../org/apache/arrow/flatbuf/FixedSizeList.java    |   60 -
 .../org/apache/arrow/flatbuf/FloatingPoint.java    |   57 -
 .../main/java/org/apache/arrow/flatbuf/Footer.java |  100 -
 .../main/java/org/apache/arrow/flatbuf/Int.java    |   61 -
 .../java/org/apache/arrow/flatbuf/Interval.java    |   57 -
 .../org/apache/arrow/flatbuf/IntervalUnit.java     |   30 -
 .../java/org/apache/arrow/flatbuf/KeyValue.java    |   70 -
 .../java/org/apache/arrow/flatbuf/LargeBinary.java |   52 -
 .../java/org/apache/arrow/flatbuf/LargeList.java   |   52 -
 .../java/org/apache/arrow/flatbuf/LargeUtf8.java   |   52 -
 .../main/java/org/apache/arrow/flatbuf/List.java   |   48 -
 .../main/java/org/apache/arrow/flatbuf/Map.java    |   87 -
 .../java/org/apache/arrow/flatbuf/Message.java     |   81 -
 .../org/apache/arrow/flatbuf/MessageHeader.java    |   44 -
 .../org/apache/arrow/flatbuf/MetadataVersion.java  |   54 -
 .../main/java/org/apache/arrow/flatbuf/Null.java   |   51 -
 .../java/org/apache/arrow/flatbuf/Precision.java   |   31 -
 .../java/org/apache/arrow/flatbuf/RecordBatch.java |  103 -
 .../main/java/org/apache/arrow/flatbuf/Schema.java |  102 -
 .../arrow/flatbuf/SparseMatrixCompressedAxis.java  |   30 -
 .../apache/arrow/flatbuf/SparseMatrixIndexCSX.java |  114 -
 .../org/apache/arrow/flatbuf/SparseTensor.java     |   92 -
 .../apache/arrow/flatbuf/SparseTensorIndex.java    |   32 -
 .../apache/arrow/flatbuf/SparseTensorIndexCOO.java |  118 -
 .../apache/arrow/flatbuf/SparseTensorIndexCSF.java |  173 --
 .../java/org/apache/arrow/flatbuf/Struct_.java     |   53 -
 .../main/java/org/apache/arrow/flatbuf/Tensor.java |   91 -
 .../java/org/apache/arrow/flatbuf/TensorDim.java   |   74 -
 .../main/java/org/apache/arrow/flatbuf/Time.java   |   66 -
 .../java/org/apache/arrow/flatbuf/TimeUnit.java    |   32 -
 .../java/org/apache/arrow/flatbuf/Timestamp.java   |   93 -
 .../main/java/org/apache/arrow/flatbuf/Type.java   |   55 -
 .../main/java/org/apache/arrow/flatbuf/Union.java  |   74 -
 .../java/org/apache/arrow/flatbuf/UnionMode.java   |   30 -
 .../main/java/org/apache/arrow/flatbuf/Utf8.java   |   51 -
 java/gandiva/CMakeLists.txt                        |   62 -
 java/gandiva/README.md                             |   32 -
 java/gandiva/pom.xml                               |  155 -
 .../gandiva/evaluator/ConfigurationBuilder.java    |   72 -
 .../arrow/gandiva/evaluator/DecimalTypeUtil.java   |   94 -
 .../gandiva/evaluator/ExpressionRegistry.java      |  220 --
 .../evaluator/ExpressionRegistryJniHelper.java     |   29 -
 .../org/apache/arrow/gandiva/evaluator/Filter.java |  199 --
 .../arrow/gandiva/evaluator/FunctionSignature.java |   93 -
 .../apache/arrow/gandiva/evaluator/JniLoader.java  |  170 --
 .../apache/arrow/gandiva/evaluator/JniWrapper.java |  120 -
 .../apache/arrow/gandiva/evaluator/Projector.java  |  364 ---
 .../arrow/gandiva/evaluator/SelectionVector.java   |   87 -
 .../gandiva/evaluator/SelectionVectorInt16.java    |   49 -
 .../gandiva/evaluator/SelectionVectorInt32.java    |   48 -
 .../arrow/gandiva/evaluator/VectorExpander.java    |   69 -
 .../exceptions/EvaluatorClosedException.java       |   25 -
 .../arrow/gandiva/exceptions/GandivaException.java |   35 -
 .../exceptions/UnsupportedTypeException.java       |   27 -
 .../apache/arrow/gandiva/expression/AndNode.java   |   47 -
 .../arrow/gandiva/expression/ArrowTypeHelper.java  |  350 ---
 .../arrow/gandiva/expression/BinaryNode.java       |   45 -
 .../arrow/gandiva/expression/BooleanNode.java      |   43 -
 .../apache/arrow/gandiva/expression/Condition.java |   42 -
 .../arrow/gandiva/expression/DecimalNode.java      |   49 -
 .../arrow/gandiva/expression/DoubleNode.java       |   43 -
 .../arrow/gandiva/expression/ExpressionTree.java   |   46 -
 .../apache/arrow/gandiva/expression/FieldNode.java |   43 -
 .../apache/arrow/gandiva/expression/FloatNode.java |   43 -
 .../arrow/gandiva/expression/FunctionNode.java     |   54 -
 .../apache/arrow/gandiva/expression/IfNode.java    |   52 -
 .../apache/arrow/gandiva/expression/InNode.java    |  121 -
 .../apache/arrow/gandiva/expression/IntNode.java   |   43 -
 .../apache/arrow/gandiva/expression/LongNode.java  |   43 -
 .../apache/arrow/gandiva/expression/NullNode.java  |   41 -
 .../apache/arrow/gandiva/expression/OrNode.java    |   47 -
 .../arrow/gandiva/expression/StringNode.java       |   48 -
 .../arrow/gandiva/expression/TreeBuilder.java      |  220 --
 .../apache/arrow/gandiva/expression/TreeNode.java  |   34 -
 .../arrow/gandiva/evaluator/BaseEvaluatorTest.java |  404 ---
 .../gandiva/evaluator/DecimalTypeUtilTest.java     |   89 -
 .../gandiva/evaluator/ExpressionRegistryTest.java  |   65 -
 .../arrow/gandiva/evaluator/FilterProjectTest.java |  102 -
 .../apache/arrow/gandiva/evaluator/FilterTest.java |  315 --
 .../gandiva/evaluator/MicroBenchmarkTest.java      |  151 -
 .../gandiva/evaluator/ProjectorDecimalTest.java    |  797 ------
 .../arrow/gandiva/evaluator/ProjectorTest.java     | 2237 ---------------
 .../arrow/gandiva/evaluator/TestJniLoader.java     |   53 -
 .../gandiva/expression/ArrowTypeHelperTest.java    |  105 -
 .../arrow/gandiva/expression/TreeBuilderTest.java  |  350 ---
 java/gandiva/src/test/resources/logback.xml        |   28 -
 java/memory/memory-core/pom.xml                    |   58 -
 .../java/org/apache/arrow/memory/Accountant.java   |  308 --
 .../apache/arrow/memory/AllocationListener.java    |   85 -
 .../org/apache/arrow/memory/AllocationManager.java |  221 --
 .../org/apache/arrow/memory/AllocationOutcome.java |   97 -
 .../arrow/memory/AllocationOutcomeDetails.java     |  132 -
 .../apache/arrow/memory/AllocationReservation.java |   88 -
 .../arrow/memory/AllocatorClosedException.java     |   35 -
 .../java/org/apache/arrow/memory/ArrowBuf.java     | 1245 --------
 .../org/apache/arrow/memory/BaseAllocator.java     |  946 ------
 .../org/apache/arrow/memory/BoundsChecking.java    |   63 -
 .../org/apache/arrow/memory/BufferAllocator.java   |  228 --
 .../java/org/apache/arrow/memory/BufferLedger.java |  525 ----
 .../org/apache/arrow/memory/BufferManager.java     |   53 -
 .../org/apache/arrow/memory/CheckAllocator.java    |   87 -
 .../org/apache/arrow/memory/ChildAllocator.java    |   44 -
 .../memory/DefaultAllocationManagerOption.java     |  133 -
 .../arrow/memory/LowCostIdentityHashMap.java       |  336 ---
 .../apache/arrow/memory/OutOfMemoryException.java  |   67 -
 .../apache/arrow/memory/OwnershipTransferNOOP.java |   39 -
 .../arrow/memory/OwnershipTransferResult.java      |   28 -
 .../main/java/org/apache/arrow/memory/README.md    |  121 -
 .../org/apache/arrow/memory/ReferenceManager.java  |  175 --
 .../org/apache/arrow/memory/RootAllocator.java     |   71 -
 .../apache/arrow/memory/ValueWithKeyIncluded.java  |   28 -
 .../java/org/apache/arrow/memory/package-info.java |   26 -
 .../memory/rounding/DefaultRoundingPolicy.java     |  114 -
 .../arrow/memory/rounding/RoundingPolicy.java      |   26 -
 .../memory/rounding/SegmentRoundingPolicy.java     |   60 -
 .../apache/arrow/memory/util/ArrowBufPointer.java  |  187 --
 .../apache/arrow/memory/util/AssertionUtil.java    |   40 -
 .../arrow/memory/util/AutoCloseableLock.java       |   43 -
 .../arrow/memory/util/ByteFunctionHelpers.java     |  347 ---
 .../org/apache/arrow/memory/util/CommonUtil.java   |   79 -
 .../apache/arrow/memory/util/HistoricalLog.java    |  178 --
 .../apache/arrow/memory/util/LargeMemoryUtil.java  |   44 -
 .../org/apache/arrow/memory/util/MemoryUtil.java   |  170 --
 .../org/apache/arrow/memory/util/StackTrace.java   |   70 -
 .../arrow/memory/util/hash/ArrowBufHasher.java     |   47 -
 .../arrow/memory/util/hash/MurmurHasher.java       |  175 --
 .../arrow/memory/util/hash/SimpleHasher.java       |  116 -
 .../java/org/apache/arrow/util/AutoCloseables.java |  227 --
 .../java/org/apache/arrow/util/Collections2.java   |   68 -
 .../java/org/apache/arrow/util/Preconditions.java  | 1323 ---------
 .../org/apache/arrow/util/VisibleForTesting.java   |   26 -
 .../memory/DefaultAllocationManagerFactory.java    |   63 -
 .../org/apache/arrow/memory/TestAccountant.java    |  172 --
 .../apache/arrow/memory/TestAllocationManager.java |   39 -
 .../java/org/apache/arrow/memory/TestArrowBuf.java |  149 -
 .../apache/arrow/memory/TestBoundaryChecking.java  |  150 -
 .../arrow/memory/TestLowCostIdentityHashMap.java   |  169 --
 .../arrow/memory/util/TestArrowBufPointer.java     |  216 --
 .../arrow/memory/util/TestByteFunctionHelpers.java |  167 --
 .../arrow/memory/util/TestLargeMemoryUtil.java     |  105 -
 .../arrow/memory/util/hash/TestArrowBufHasher.java |  123 -
 java/memory/memory-netty/pom.xml                   |  101 -
 .../java/io/netty/buffer/ExpandableByteBuf.java    |   56 -
 .../src/main/java/io/netty/buffer/LargeBuffer.java |   34 -
 .../io/netty/buffer/MutableWrappedByteBuf.java     |  448 ---
 .../main/java/io/netty/buffer/NettyArrowBuf.java   |  622 ----
 .../io/netty/buffer/PooledByteBufAllocatorL.java   |  280 --
 .../io/netty/buffer/UnsafeDirectLittleEndian.java  |  270 --
 .../apache/arrow/memory/ArrowByteBufAllocator.java |  161 --
 .../memory/DefaultAllocationManagerFactory.java    |   38 -
 .../arrow/memory/NettyAllocationManager.java       |  123 -
 .../java/io/netty/buffer/TestNettyArrowBuf.java    |  141 -
 .../netty/buffer/TestUnsafeDirectLittleEndian.java |   77 -
 .../apache/arrow/memory/ITTestLargeArrowBuf.java   |   72 -
 .../arrow/memory/TestAllocationManagerNetty.java   |   39 -
 .../org/apache/arrow/memory/TestBaseAllocator.java | 1183 --------
 .../org/apache/arrow/memory/TestEmptyArrowBuf.java |   88 -
 .../org/apache/arrow/memory/TestEndianness.java    |   51 -
 .../arrow/memory/TestNettyAllocationManager.java   |  108 -
 .../memory-netty/src/test/resources/logback.xml    |   28 -
 java/memory/memory-unsafe/pom.xml                  |   52 -
 .../memory/DefaultAllocationManagerFactory.java    |   37 -
 .../arrow/memory/UnsafeAllocationManager.java      |   70 -
 .../arrow/memory/TestAllocationManagerUnsafe.java  |   41 -
 .../arrow/memory/TestUnsafeAllocationManager.java  |   68 -
 java/memory/pom.xml                                |   29 -
 .../io/netty/buffer/TestExpandableByteBuf.java     |  117 -
 java/performance/pom.xml                           |  222 --
 .../arrow/adapter/AvroAdapterBenchmarks.java       |  141 -
 .../arrow/adapter/jdbc/JdbcAdapterBenchmarks.java  |  359 ---
 .../search/ParallelSearcherBenchmarks.java         |  115 -
 .../apache/arrow/memory/AllocatorBenchmarks.java   |   95 -
 .../apache/arrow/memory/ArrowBufBenchmarks.java    |   82 -
 .../memory/util/ArrowBufPointerBenchmarks.java     |  107 -
 .../memory/util/ByteFunctionHelpersBenchmarks.java |  138 -
 .../arrow/vector/BaseValueVectorBenchmarks.java    |   95 -
 .../arrow/vector/BitVectorHelperBenchmarks.java    |  229 --
 .../arrow/vector/DecimalVectorBenchmarks.java      |  121 -
 .../org/apache/arrow/vector/Float8Benchmarks.java  |  122 -
 .../arrow/vector/FloatingPointBenchmarks.java      |  134 -
 .../org/apache/arrow/vector/IntBenchmarks.java     |  110 -
 .../org/apache/arrow/vector/VarCharBenchmarks.java |  102 -
 .../vector/VariableWidthVectorBenchmarks.java      |  130 -
 .../apache/arrow/vector/VectorLoaderBenchmark.java |  117 -
 .../arrow/vector/VectorUnloaderBenchmark.java      |  109 -
 .../dictionary/DictionaryEncoderBenchmarks.java    |  147 -
 .../arrow/vector/ipc/WriteChannelBenchmark.java    |   87 -
 .../ipc/message/ArrowRecordBatchBenchmarks.java    |   98 -
 .../arrow/vector/util/TransferPairBenchmarks.java  |  123 -
 java/plasma/README.md                              |   39 -
 java/plasma/pom.xml                                |   34 -
 .../org/apache/arrow/plasma/ObjectStoreLink.java   |  131 -
 .../java/org/apache/arrow/plasma/PlasmaClient.java |  184 --
 .../org/apache/arrow/plasma/PlasmaClientJNI.java   |   57 -
 .../exceptions/DuplicateObjectException.java       |   32 -
 .../plasma/exceptions/PlasmaClientException.java   |   32 -
 .../exceptions/PlasmaOutOfMemoryException.java     |   40 -
 .../org/apache/arrow/plasma/PlasmaClientTest.java  |  313 --
 java/plasma/src/test/resources/logback.xml         |   28 -
 java/plasma/test.sh                                |   56 -
 java/pom.xml                                       |  764 -----
 java/tools/pom.xml                                 |  106 -
 .../java/org/apache/arrow/tools/EchoServer.java    |  146 -
 .../java/org/apache/arrow/tools/FileRoundtrip.java |  123 -
 .../java/org/apache/arrow/tools/FileToStream.java  |   78 -
 .../java/org/apache/arrow/tools/Integration.java   |  244 --
 .../java/org/apache/arrow/tools/StreamToFile.java  |   76 -
 .../apache/arrow/tools/ArrowFileTestFixtures.java  |  105 -
 .../org/apache/arrow/tools/EchoServerTest.java     |  301 --
 .../org/apache/arrow/tools/TestFileRoundtrip.java  |   65 -
 .../org/apache/arrow/tools/TestIntegration.java    |  288 --
 java/tools/src/test/resources/logback.xml          |   27 -
 java/vector/pom.xml                                |  291 --
 java/vector/src/main/codegen/config.fmpp           |   24 -
 java/vector/src/main/codegen/data/ArrowTypes.tdd   |  124 -
 .../src/main/codegen/data/ValueVectorTypes.tdd     |  196 --
 java/vector/src/main/codegen/includes/license.ftl  |   16 -
 .../src/main/codegen/includes/vv_imports.ftl       |   60 -
 .../codegen/templates/AbstractFieldReader.java     |  128 -
 .../codegen/templates/AbstractFieldWriter.java     |  177 --
 .../templates/AbstractPromotableFieldWriter.java   |  189 --
 .../src/main/codegen/templates/ArrowType.java      |  375 ---
 .../src/main/codegen/templates/BaseReader.java     |   75 -
 .../src/main/codegen/templates/BaseWriter.java     |  116 -
 .../templates/CaseSensitiveStructWriters.java      |   53 -
 .../src/main/codegen/templates/ComplexCopier.java  |  191 --
 .../src/main/codegen/templates/ComplexReaders.java |  147 -
 .../src/main/codegen/templates/ComplexWriters.java |  211 --
 .../main/codegen/templates/DenseUnionReader.java   |  217 --
 .../main/codegen/templates/DenseUnionVector.java   |  896 ------
 .../main/codegen/templates/DenseUnionWriter.java   |  262 --
 .../main/codegen/templates/HolderReaderImpl.java   |  170 --
 .../src/main/codegen/templates/NullReader.java     |  147 -
 .../src/main/codegen/templates/StructWriters.java  |  286 --
 .../templates/UnionFixedSizeListWriter.java        |  296 --
 .../main/codegen/templates/UnionListWriter.java    |  303 --
 .../src/main/codegen/templates/UnionMapWriter.java |  210 --
 .../src/main/codegen/templates/UnionReader.java    |  210 --
 .../src/main/codegen/templates/UnionVector.java    |  800 ------
 .../src/main/codegen/templates/UnionWriter.java    |  278 --
 .../src/main/codegen/templates/ValueHolders.java   |   79 -
 .../org/apache/arrow/vector/AddOrGetResult.java    |   46 -
 .../org/apache/arrow/vector/AllocationHelper.java  |   95 -
 .../apache/arrow/vector/BaseFixedWidthVector.java  |  923 ------
 .../org/apache/arrow/vector/BaseIntVector.java     |   43 -
 .../arrow/vector/BaseLargeVariableWidthVector.java | 1363 ---------
 .../org/apache/arrow/vector/BaseValueVector.java   |  232 --
 .../arrow/vector/BaseVariableWidthVector.java      | 1414 ---------
 .../java/org/apache/arrow/vector/BigIntVector.java |  358 ---
 .../java/org/apache/arrow/vector/BitVector.java    |  599 ----
 .../org/apache/arrow/vector/BitVectorHelper.java   |  459 ---
 .../java/org/apache/arrow/vector/BufferBacked.java |   31 -
 .../java/org/apache/arrow/vector/BufferLayout.java |  153 -
 .../org/apache/arrow/vector/DateDayVector.java     |  347 ---
 .../org/apache/arrow/vector/DateMilliVector.java   |  350 ---
 .../org/apache/arrow/vector/Decimal256Vector.java  |  584 ----
 .../org/apache/arrow/vector/DecimalVector.java     |  584 ----
 .../apache/arrow/vector/DensityAwareVector.java    |   57 -
 .../org/apache/arrow/vector/DurationVector.java    |  406 ---
 .../arrow/vector/ElementAddressableVector.java     |   42 -
 .../apache/arrow/vector/ExtensionTypeVector.java   |  266 --
 .../java/org/apache/arrow/vector/FieldVector.java  |   90 -
 .../apache/arrow/vector/FixedSizeBinaryVector.java |  386 ---
 .../org/apache/arrow/vector/FixedWidthVector.java  |   36 -
 .../java/org/apache/arrow/vector/Float4Vector.java |  361 ---
 .../java/org/apache/arrow/vector/Float8Vector.java |  362 ---
 .../apache/arrow/vector/FloatingPointVector.java   |   46 -
 .../apache/arrow/vector/GenerateSampleData.java    |  337 ---
 .../java/org/apache/arrow/vector/IntVector.java    |  362 ---
 .../org/apache/arrow/vector/IntervalDayVector.java |  433 ---
 .../apache/arrow/vector/IntervalYearVector.java    |  382 ---
 .../apache/arrow/vector/LargeVarBinaryVector.java  |  305 --
 .../apache/arrow/vector/LargeVarCharVector.java    |  331 ---
 .../apache/arrow/vector/NullCheckingForGet.java    |   84 -
 .../java/org/apache/arrow/vector/NullVector.java   |  292 --
 .../apache/arrow/vector/SchemaChangeCallBack.java  |   57 -
 .../org/apache/arrow/vector/SmallIntVector.java    |  389 ---
 .../org/apache/arrow/vector/TimeMicroVector.java   |  347 ---
 .../org/apache/arrow/vector/TimeMilliVector.java   |  351 ---
 .../org/apache/arrow/vector/TimeNanoVector.java    |  347 ---
 .../org/apache/arrow/vector/TimeSecVector.java     |  348 ---
 .../arrow/vector/TimeStampMicroTZVector.java       |  239 --
 .../apache/arrow/vector/TimeStampMicroVector.java  |  236 --
 .../arrow/vector/TimeStampMilliTZVector.java       |  238 --
 .../apache/arrow/vector/TimeStampMilliVector.java  |  236 --
 .../apache/arrow/vector/TimeStampNanoTZVector.java |  241 --
 .../apache/arrow/vector/TimeStampNanoVector.java   |  236 --
 .../apache/arrow/vector/TimeStampSecTZVector.java  |  238 --
 .../apache/arrow/vector/TimeStampSecVector.java    |  237 --
 .../org/apache/arrow/vector/TimeStampVector.java   |  197 --
 .../org/apache/arrow/vector/TinyIntVector.java     |  390 ---
 .../java/org/apache/arrow/vector/TypeLayout.java   |  446 ---
 .../java/org/apache/arrow/vector/UInt1Vector.java  |  364 ---
 .../java/org/apache/arrow/vector/UInt2Vector.java  |  339 ---
 .../java/org/apache/arrow/vector/UInt4Vector.java  |  334 ---
 .../java/org/apache/arrow/vector/UInt8Vector.java  |  330 ---
 .../java/org/apache/arrow/vector/ValueVector.java  |  285 --
 .../org/apache/arrow/vector/VarBinaryVector.java   |  306 --
 .../org/apache/arrow/vector/VarCharVector.java     |  331 ---
 .../apache/arrow/vector/VariableWidthVector.java   |   53 -
 .../arrow/vector/VectorDefinitionSetter.java       |   26 -
 .../java/org/apache/arrow/vector/VectorLoader.java |  137 -
 .../org/apache/arrow/vector/VectorSchemaRoot.java  |  429 ---
 .../org/apache/arrow/vector/VectorUnloader.java    |  107 -
 .../java/org/apache/arrow/vector/ZeroVector.java   |  107 -
 .../arrow/vector/compare/ApproxEqualsVisitor.java  |  147 -
 .../org/apache/arrow/vector/compare/Range.java     |   85 -
 .../arrow/vector/compare/RangeEqualsVisitor.java   |  550 ----
 .../arrow/vector/compare/TypeEqualsVisitor.java    |  148 -
 .../arrow/vector/compare/VectorEqualsVisitor.java  |   60 -
 .../arrow/vector/compare/VectorValueEqualizer.java |   44 -
 .../apache/arrow/vector/compare/VectorVisitor.java |   58 -
 .../compare/util/ValueEpsilonEqualizers.java       |  149 -
 .../vector/complex/AbstractContainerVector.java    |  140 -
 .../arrow/vector/complex/AbstractStructVector.java |  425 ---
 .../arrow/vector/complex/BaseListVector.java       |   36 -
 .../vector/complex/BaseRepeatedValueVector.java    |  361 ---
 .../arrow/vector/complex/EmptyValuePopulator.java  |   51 -
 .../arrow/vector/complex/FixedSizeListVector.java  |  683 -----
 .../arrow/vector/complex/LargeListVector.java      | 1022 -------
 .../apache/arrow/vector/complex/ListVector.java    |  893 ------
 .../org/apache/arrow/vector/complex/MapVector.java |  122 -
 .../vector/complex/NonNullableStructVector.java    |  440 ---
 .../apache/arrow/vector/complex/Positionable.java  |   29 -
 .../arrow/vector/complex/PromotableVector.java     |   32 -
 .../complex/RepeatedFixedWidthVectorLike.java      |   32 -
 .../arrow/vector/complex/RepeatedValueVector.java  |   46 -
 .../complex/RepeatedVariableWidthVectorLike.java   |   40 -
 .../org/apache/arrow/vector/complex/StateTool.java |   44 -
 .../apache/arrow/vector/complex/StructVector.java  |  600 ----
 .../arrow/vector/complex/VectorWithOrdinal.java    |   34 -
 .../vector/complex/impl/AbstractBaseReader.java    |  112 -
 .../vector/complex/impl/AbstractBaseWriter.java    |   55 -
 .../vector/complex/impl/ComplexWriterImpl.java     |  227 --
 .../complex/impl/NullableStructReaderImpl.java     |   59 -
 .../complex/impl/NullableStructWriterFactory.java  |   48 -
 .../vector/complex/impl/PromotableWriter.java      |  397 ---
 .../vector/complex/impl/SingleListReaderImpl.java  |   91 -
 .../complex/impl/SingleStructReaderImpl.java       |  113 -
 .../complex/impl/StructOrListWriterImpl.java       |  137 -
 .../complex/impl/UnionFixedSizeListReader.java     |  105 -
 .../vector/complex/impl/UnionLargeListReader.java  |  109 -
 .../arrow/vector/complex/impl/UnionListReader.java |  107 -
 .../arrow/vector/complex/impl/UnionMapReader.java  |   77 -
 .../arrow/vector/complex/reader/FieldReader.java   |   32 -
 .../arrow/vector/complex/writer/FieldWriter.java   |   32 -
 .../compression/AbstractCompressionCodec.java      |  116 -
 .../arrow/vector/compression/CompressionCodec.java |   62 -
 .../arrow/vector/compression/CompressionUtil.java  |  103 -
 .../vector/compression/NoCompressionCodec.java     |   67 -
 .../apache/arrow/vector/dictionary/Dictionary.java |   75 -
 .../arrow/vector/dictionary/DictionaryEncoder.java |  196 --
 .../vector/dictionary/DictionaryHashTable.java     |  295 --
 .../vector/dictionary/DictionaryProvider.java      |   62 -
 .../vector/dictionary/ListSubfieldEncoder.java     |  137 -
 .../vector/dictionary/StructSubfieldEncoder.java   |  196 --
 .../apache/arrow/vector/holders/ComplexHolder.java |   28 -
 .../arrow/vector/holders/DenseUnionHolder.java     |   38 -
 .../arrow/vector/holders/RepeatedListHolder.java   |   26 -
 .../arrow/vector/holders/RepeatedStructHolder.java |   26 -
 .../apache/arrow/vector/holders/UnionHolder.java   |   37 -
 .../apache/arrow/vector/holders/ValueHolder.java   |   31 -
 .../apache/arrow/vector/ipc/ArrowFileReader.java   |  230 --
 .../apache/arrow/vector/ipc/ArrowFileWriter.java   |  119 -
 .../org/apache/arrow/vector/ipc/ArrowMagic.java    |   44 -
 .../org/apache/arrow/vector/ipc/ArrowReader.java   |  255 --
 .../apache/arrow/vector/ipc/ArrowStreamReader.java |  229 --
 .../apache/arrow/vector/ipc/ArrowStreamWriter.java |   86 -
 .../org/apache/arrow/vector/ipc/ArrowWriter.java   |  210 --
 .../vector/ipc/InvalidArrowFileException.java      |   30 -
 .../apache/arrow/vector/ipc/JsonFileReader.java    |  783 -----
 .../apache/arrow/vector/ipc/JsonFileWriter.java    |  409 ---
 .../org/apache/arrow/vector/ipc/ReadChannel.java   |  102 -
 .../arrow/vector/ipc/SeekableReadChannel.java      |   43 -
 .../org/apache/arrow/vector/ipc/WriteChannel.java  |  162 --
 .../arrow/vector/ipc/message/ArrowBlock.java       |   95 -
 .../vector/ipc/message/ArrowBodyCompression.java   |   55 -
 .../arrow/vector/ipc/message/ArrowBuffer.java      |   90 -
 .../vector/ipc/message/ArrowDictionaryBatch.java   |   94 -
 .../arrow/vector/ipc/message/ArrowFieldNode.java   |   64 -
 .../arrow/vector/ipc/message/ArrowFooter.java      |  226 --
 .../arrow/vector/ipc/message/ArrowMessage.java     |   42 -
 .../arrow/vector/ipc/message/ArrowRecordBatch.java |  259 --
 .../arrow/vector/ipc/message/FBSerializable.java   |   30 -
 .../arrow/vector/ipc/message/FBSerializables.java  |   67 -
 .../apache/arrow/vector/ipc/message/IpcOption.java |   44 -
 .../vector/ipc/message/MessageChannelReader.java   |   91 -
 .../vector/ipc/message/MessageMetadataResult.java  |  115 -
 .../arrow/vector/ipc/message/MessageResult.java    |   61 -
 .../vector/ipc/message/MessageSerializer.java      |  731 -----
 .../org/apache/arrow/vector/types/DateUnit.java    |   50 -
 .../arrow/vector/types/FloatingPointPrecision.java |   55 -
 .../apache/arrow/vector/types/IntervalUnit.java    |   50 -
 .../apache/arrow/vector/types/MetadataVersion.java |   65 -
 .../org/apache/arrow/vector/types/TimeUnit.java    |   50 -
 .../java/org/apache/arrow/vector/types/Types.java  |  998 -------
 .../org/apache/arrow/vector/types/UnionMode.java   |   57 -
 .../vector/types/pojo/DictionaryEncoding.java      |   88 -
 .../vector/types/pojo/ExtensionTypeRegistry.java   |   42 -
 .../org/apache/arrow/vector/types/pojo/Field.java  |  325 ---
 .../apache/arrow/vector/types/pojo/FieldType.java  |  124 -
 .../org/apache/arrow/vector/types/pojo/Schema.java |  244 --
 .../util/ByteArrayReadableSeekableByteChannel.java |   86 -
 .../arrow/vector/util/ByteFunctionHelpers.java     |  112 -
 .../org/apache/arrow/vector/util/CallBack.java     |   25 -
 .../arrow/vector/util/DataSizeRoundingUtil.java    |   99 -
 .../org/apache/arrow/vector/util/DateUtility.java  |  134 -
 .../apache/arrow/vector/util/DecimalUtility.java   |  188 --
 .../arrow/vector/util/DictionaryUtility.java       |  145 -
 .../util/ElementAddressableVectorIterator.java     |   86 -
 .../arrow/vector/util/JsonStringArrayList.java     |   55 -
 .../arrow/vector/util/JsonStringHashMap.java       |   48 -
 .../apache/arrow/vector/util/MapWithOrdinal.java   |   67 -
 .../arrow/vector/util/MapWithOrdinalImpl.java      |  248 --
 .../arrow/vector/util/MultiMapWithOrdinal.java     |  230 --
 .../vector/util/OversizedAllocationException.java  |   52 -
 .../vector/util/PromotableMultiMapWithOrdinal.java |  133 -
 .../vector/util/SchemaChangeRuntimeException.java  |   48 -
 .../apache/arrow/vector/util/SchemaUtility.java    |   63 -
 .../java/org/apache/arrow/vector/util/Text.java    |  688 -----
 .../org/apache/arrow/vector/util/TransferPair.java |   33 -
 .../org/apache/arrow/vector/util/Validator.java    |  190 --
 .../arrow/vector/util/ValueVectorUtility.java      |  172 --
 .../apache/arrow/vector/util/VectorAppender.java   |  533 ----
 .../arrow/vector/util/VectorBatchAppender.java     |   39 -
 .../vector/util/VectorSchemaRootAppender.java      |   83 -
 .../vector/validate/MetadataV4UnionChecker.java    |   82 -
 .../apache/arrow/vector/validate/ValidateUtil.java |   61 -
 .../validate/ValidateVectorBufferVisitor.java      |  239 --
 .../vector/validate/ValidateVectorDataVisitor.java |  173 --
 .../vector/validate/ValidateVectorTypeVisitor.java |  356 ---
 .../vector/validate/ValidateVectorVisitor.java     |  266 --
 .../java/org/apache/arrow/util/TestSchemaUtil.java |   51 -
 .../apache/arrow/vector/DirtyRootAllocator.java    |   52 -
 .../org/apache/arrow/vector/ITTestLargeVector.java |  280 --
 .../org/apache/arrow/vector/TestBitVector.java     |  543 ----
 .../apache/arrow/vector/TestBitVectorHelper.java   |  235 --
 .../arrow/vector/TestBufferOwnershipTransfer.java  |  131 -
 .../java/org/apache/arrow/vector/TestCopyFrom.java | 1104 -------
 .../apache/arrow/vector/TestDecimal256Vector.java  |  357 ---
 .../org/apache/arrow/vector/TestDecimalVector.java |  365 ---
 .../apache/arrow/vector/TestDenseUnionVector.java  |  625 ----
 .../apache/arrow/vector/TestDictionaryVector.java  | 1031 -------
 .../apache/arrow/vector/TestDurationVector.java    |  137 -
 .../arrow/vector/TestFixedSizeBinaryVector.java    |  279 --
 .../arrow/vector/TestFixedSizeListVector.java      |  506 ----
 .../arrow/vector/TestIntervalYearVector.java       |   58 -
 .../apache/arrow/vector/TestLargeListVector.java   |  982 -------
 .../arrow/vector/TestLargeVarBinaryVector.java     |  104 -
 .../arrow/vector/TestLargeVarCharVector.java       |  816 ------
 .../org/apache/arrow/vector/TestListVector.java    |  981 -------
 .../org/apache/arrow/vector/TestMapVector.java     |  702 -----
 .../arrow/vector/TestNullCheckingForGet.java       |   92 -
 .../vector/TestOutOfMemoryForValueVector.java      |   73 -
 .../TestOversizedAllocationForValueVector.java     |  132 -
 .../apache/arrow/vector/TestSplitAndTransfer.java  |  410 ---
 .../org/apache/arrow/vector/TestStructVector.java  |  183 --
 .../org/apache/arrow/vector/TestTypeLayout.java    |   98 -
 .../org/apache/arrow/vector/TestUnionVector.java   |  433 ---
 .../java/org/apache/arrow/vector/TestUtils.java    |   45 -
 .../org/apache/arrow/vector/TestValueVector.java   | 3023 --------------------
 .../apache/arrow/vector/TestVarCharListVector.java |   77 -
 .../org/apache/arrow/vector/TestVectorAlloc.java   |  106 -
 .../org/apache/arrow/vector/TestVectorReAlloc.java |  474 ---
 .../org/apache/arrow/vector/TestVectorReset.java   |  168 --
 .../apache/arrow/vector/TestVectorSchemaRoot.java  |  318 --
 .../apache/arrow/vector/TestVectorUnloadLoad.java  |  332 ---
 .../vector/compare/TestRangeEqualsVisitor.java     |  739 -----
 .../vector/compare/TestTypeEqualsVisitor.java      |  185 --
 .../vector/complex/impl/TestComplexCopier.java     |  763 -----
 .../vector/complex/impl/TestPromotableWriter.java  |  167 --
 .../vector/complex/writer/TestComplexWriter.java   | 1258 --------
 .../org/apache/arrow/vector/ipc/BaseFileTest.java  |  850 ------
 .../vector/ipc/ITTestIPCWithLargeArrowBuffers.java |  187 --
 .../arrow/vector/ipc/MessageSerializerTest.java    |  227 --
 .../org/apache/arrow/vector/ipc/TestArrowFile.java |  134 -
 .../apache/arrow/vector/ipc/TestArrowFooter.java   |   68 -
 .../arrow/vector/ipc/TestArrowReaderWriter.java    |  882 ------
 .../apache/arrow/vector/ipc/TestArrowStream.java   |  147 -
 .../arrow/vector/ipc/TestArrowStreamPipe.java      |  161 --
 .../org/apache/arrow/vector/ipc/TestJSONFile.java  |  458 ---
 .../org/apache/arrow/vector/ipc/TestRoundTrip.java |  628 ----
 .../vector/ipc/TestUIntDictionaryRoundTrip.java    |  246 --
 .../ipc/message/TestMessageMetadataResult.java     |   36 -
 .../org/apache/arrow/vector/pojo/TestConvert.java  |  169 --
 .../arrow/vector/testing/RandomDataGenerator.java  |   44 -
 .../vector/testing/TestValueVectorPopulator.java   |  604 ----
 .../vector/testing/ValueVectorDataPopulator.java   |  708 -----
 .../arrow/vector/types/pojo/TestExtensionType.java |  240 --
 .../apache/arrow/vector/types/pojo/TestField.java  |   63 -
 .../apache/arrow/vector/types/pojo/TestSchema.java |  254 --
 .../arrow/vector/util/DecimalUtilityTest.java      |  127 -
 .../vector/util/TestDataSizeRoundingUtil.java      |   76 -
 .../util/TestElementAddressableVectorIterator.java |  134 -
 .../arrow/vector/util/TestMultiMapWithOrdinal.java |   60 -
 .../apache/arrow/vector/util/TestValidator.java    |   56 -
 .../arrow/vector/util/TestVectorAppender.java      |  794 -----
 .../arrow/vector/util/TestVectorBatchAppender.java |   72 -
 .../vector/util/TestVectorSchemaRootAppender.java  |  161 --
 .../arrow/vector/validate/TestValidateVector.java  |  260 --
 .../vector/validate/TestValidateVectorFull.java    |  234 --
 .../validate/TestValidateVectorSchemaRoot.java     |  101 -
 .../validate/TestValidateVectorTypeVisitor.java    |  301 --
 java/vector/src/test/resources/logback.xml         |   28 -
 930 files changed, 151922 deletions(-)

diff --git a/java/.gitattributes b/java/.gitattributes
deleted file mode 100644
index 5966153..0000000
--- a/java/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-.gitattributes export-ignore
-.gitignore export-ignore
diff --git a/java/.gitignore b/java/.gitignore
deleted file mode 100644
index 03f5bf7..0000000
--- a/java/.gitignore
+++ /dev/null
@@ -1,23 +0,0 @@
-.project
-.buildpath
-.classpath
-.checkstyle
-.settings/
-.idea/
-TAGS
-*.log
-*.lck
-*.iml
-target/
-*.DS_Store
-*.patch
-*~
-git.properties
-contrib/native/client/build/
-contrib/native/client/build/*
-CMakeCache.txt
-CMakeFiles
-Makefile
-cmake_install.cmake
-install_manifest.txt
-?/
diff --git a/java/README.md b/java/README.md
deleted file mode 100644
index 29d1fcf..0000000
--- a/java/README.md
+++ /dev/null
@@ -1,164 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Arrow Java
-
-## Getting Started
-
-The following guides explain the fundamental data structures used in the Java implementation of Apache Arrow.
-
-- [ValueVector](https://arrow.apache.org/docs/java/vector.html) is an abstraction that is used to store a sequence of values having the same type in an individual column.
-- [VectorSchemaRoot](https://arrow.apache.org/docs/java/vector_schema_root.html) is a container that can hold multiple vectors based on a schema.
-- The [Reading/Writing IPC formats](https://arrow.apache.org/docs/java/ipc.html) guide explains how to stream record batches as well as serializing record batches to files.
-
-Generated javadoc documentation is available [here](https://arrow.apache.org/docs/java/).
-
-## Setup Build Environment
-
-Install:
- - Java 8 or later
- - Maven 3.3 or later
-
-## Building and running tests
-
-```bash
-git submodule update --init --recursive # Needed for flight
-cd java
-mvn install
-```
-
-## Building and running tests for Arrow JNI modules like Gandiva and ORC (optional)
-
-[Arrow C++][2] must be built before this step. The C++ build directory must
-be provided as the value of the `arrow.cpp.build.dir` argument, e.g.:
-
-```bash
-cd java
-mvn install -P arrow-jni -am -Darrow.cpp.build.dir=../../release
-```
-
-The Gandiva library is still in the alpha stage and is subject to API changes
-without deprecation warnings.
-
-## Flatbuffers dependency
-
-Arrow uses Google's Flatbuffers to transport metadata.  The Java version of the
-Flatbuffers library requires that generated classes be used with the same
-library version that generated them.  Arrow therefore packages a version of the
-arrow-vector module that shades flatbuffers and arrow-format into a single JAR.
-Using the classifier "shade-format-flatbuffers" in your pom.xml will make use of
-this JAR; you can then exclude or resolve the original dependency to a version
-of your choosing.
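
For reference, a dependency declaration that opts into this classifier might look like the following sketch. This is illustrative only and not taken from the repository; `${arrow.version}` is a placeholder for whatever version property your project defines:

```xml
<dependency>
  <groupId>org.apache.arrow</groupId>
  <artifactId>arrow-vector</artifactId>
  <version>${arrow.version}</version>
  <!-- selects the JAR that shades flatbuffers and arrow-format -->
  <classifier>shade-format-flatbuffers</classifier>
</dependency>
```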
-
-### Updating the flatbuffers generated code
-
-1. Verify that your version of flatc matches the declared dependency:
-
-```bash
-$ flatc --version
-flatc version 1.12.0
-
-$ grep "dep.fbs.version" java/pom.xml
-    <dep.fbs.version>1.12.0</dep.fbs.version>
-```
-
-2. Generate the flatbuffer java files by performing the following:
-
-```bash
-cd $ARROW_HOME
-
-# remove the existing files
-rm -rf java/format/src
-
-# regenerate from the .fbs files
-flatc --java -o java/format/src/main/java format/*.fbs
-
-# prepend license header
-find java/format/src -type f | while read file; do
-  (cat header | while read line; do echo "// $line"; done; cat $file) > $file.tmp
-  mv $file.tmp $file
-done
-```
-
-## Performance Tuning
-
-There are several system properties and environment variables that users can configure.  These trade off safety (they turn off checking) for speed.  Typically they are only used in production settings after the code has been thoroughly tested without them.
-
-* Bounds checking for memory accesses: Bounds checking is on by default.  You can disable it by setting either the
-system property (`arrow.enable_unsafe_memory_access`) or the environment variable
-(`ARROW_ENABLE_UNSAFE_MEMORY_ACCESS`) to "true". When both the system property and the environment
-variable are set, the system property takes precedence.
-
-* Null checking for gets: ValueVector `get` methods (not `getObject`) verify by default that the slot is not null.  You can disable this by setting either the
-system property (`arrow.enable_null_check_for_get`) or the environment variable
-(`ARROW_ENABLE_NULL_CHECK_FOR_GET`) to "false". When both the system property and the environment
-variable are set, the system property takes precedence; a short sketch follows below.
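
The sketch below (not from the original README) shows both switches being flipped programmatically. It assumes the properties are read once by static initializers, so they must be set before any Arrow memory or vector class is loaded:

```java
public class ArrowFastPathSettings {
  public static void main(String[] args) {
    // Assumption: both properties are consulted by static initializers,
    // so set them before touching any Arrow memory/vector class.
    System.setProperty("arrow.enable_unsafe_memory_access", "true"); // disable bounds checking
    System.setProperty("arrow.enable_null_check_for_get", "false");  // disable null checks in get()
    // ... construct a RootAllocator and vectors as usual ...
  }
}
```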
-
-## Java Properties
-
- * For Java 9 or later, you should set "-Dio.netty.tryReflectionSetAccessible=true".
-This fixes the `java.lang.UnsupportedOperationException: sun.misc.Unsafe or java.nio.DirectByteBuffer.(long, int) not available` error thrown by Netty.
- * To support duplicate fields in a `StructVector`, enable "-Darrow.struct.conflict.policy=CONFLICT_APPEND".
-Duplicate fields are otherwise overwritten under the default policy (`CONFLICT_REPLACE`). To choose a policy for
-conflicting or duplicate fields, set this JVM flag or use the appropriate static constructor methods for `StructVector` (see the sketch below).
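
The sketch below is hypothetical, not from the original README: it assumes the policy property is consulted when the vector is created, so it is set before any `StructVector` is constructed.

```java
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.complex.StructVector;

public class StructConflictPolicyExample {
  public static void main(String[] args) {
    // Equivalent to passing -Darrow.struct.conflict.policy=CONFLICT_APPEND
    // on the command line; set it before constructing any StructVector.
    System.setProperty("arrow.struct.conflict.policy", "CONFLICT_APPEND");
    try (BufferAllocator allocator = new RootAllocator();
         StructVector struct = StructVector.empty("example", allocator)) {
      // Duplicate child fields are now appended rather than replaced.
    }
  }
}
```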
-
-## Java Code Style Guide
-
-Arrow Java follows the Google style guide [here][3] with the following
-differences:
-
-* Imports are grouped, from top to bottom, in this order: static imports,
-standard Java, org.\*, com.\*
-* Line length can be up to 120 characters
-* Operators for line wrapping are at end-of-line
-* Naming rules for methods, parameters, etc. have been relaxed
-* Disabled `NoFinalizer`, `OverloadMethodsDeclarationOrder`, and
-`VariableDeclarationUsageDistance` due to the existing code base. These rules
-should be followed when possible.
-
-Refer to `java/dev/checkstyle/checkstyle.xml` for rule specifics.
-
-## Test Logging Configuration
-
-When running tests, Arrow Java uses the Logback logger with SLF4J. By default,
-it uses the logback.xml present in the corresponding module's src/test/resources
-directory, which has the default log level set to INFO.
-Arrow Java can be built with an alternate logback configuration file using the
-following command run in the project root directory:
-
-```bash
-mvn -Dlogback.configurationFile=file:<path-of-logback-file>
-```
-
-See [Logback Configuration][1] for more details.
-
-## Integration Tests
-
-Integration tests which require more time or more memory can be run by activating
-the `integration-tests` profile. This activates the [maven failsafe][4] plugin
-and any class prefixed with `IT` will be run during the testing phase. The integration
-tests currently require a larger amount of memory (>4GB) and time to complete. To activate
-the profile:
-
-```bash
-mvn -Pintegration-tests <rest of mvn arguments>
-```
-
-[1]: https://logback.qos.ch/manual/configuration.html
-[2]: https://github.com/apache/arrow/blob/master/cpp/README.md
-[3]: http://google.github.io/styleguide/javaguide.html
-[4]: https://maven.apache.org/surefire/maven-failsafe-plugin/
diff --git a/java/adapter/avro/pom.xml b/java/adapter/avro/pom.xml
deleted file mode 100644
index d2b242f..0000000
--- a/java/adapter/avro/pom.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
-    license agreements. See the NOTICE file distributed with this work for additional
-    information regarding copyright ownership. The ASF licenses this file to
-    You under the Apache License, Version 2.0 (the "License"); you may not use
-    this file except in compliance with the License. You may obtain a copy of
-    the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
-    by applicable law or agreed to in writing, software distributed under the
-    License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-    OF ANY KIND, either express or implied. See the License for the specific
-    language governing permissions and limitations under the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.arrow</groupId>
-    <artifactId>arrow-java-root</artifactId>
-    <version>4.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>arrow-avro</artifactId>
-  <name>Arrow AVRO Adapter</name>
-  <description>(Contrib/Experimental) A library for converting Avro data to Arrow data.</description>
-  <url>http://maven.apache.org</url>
-
-  <dependencies>
-
-    <!-- https://mvnrepository.com/artifact/org.apache.arrow/arrow-memory-core -->
-    <dependency>
-      <groupId>org.apache.arrow</groupId>
-      <artifactId>arrow-memory-core</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <!-- https://mvnrepository.com/artifact/org.apache.arrow/arrow-memory-netty -->
-    <dependency>
-      <groupId>org.apache.arrow</groupId>
-      <artifactId>arrow-memory-netty</artifactId>
-      <version>${project.version}</version>
-      <scope>runtime</scope>
-    </dependency>
-
-    <!-- https://mvnrepository.com/artifact/org.apache.arrow/arrow-vector -->
-    <dependency>
-      <groupId>org.apache.arrow</groupId>
-      <artifactId>arrow-vector</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.avro</groupId>
-      <artifactId>avro</artifactId>
-      <version>${dep.avro.version}</version>
-    </dependency>
-  </dependencies>
-
-</project>
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrow.java b/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrow.java
deleted file mode 100644
index 9fb5ce2..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrow.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import java.io.IOException;
-
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.avro.Schema;
-import org.apache.avro.io.Decoder;
-
-/**
- * Utility class to convert Avro objects to columnar Arrow format objects.
- */
-public class AvroToArrow {
-
-  /**
-   * Fetch the data from {@link Decoder} and convert it to Arrow objects.
-   * Only for testing purposes.
-   * @param schema avro schema.
-   * @param decoder avro decoder
-   * @param config configuration of the conversion.
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   */
-  static VectorSchemaRoot avroToArrow(Schema schema, Decoder decoder, AvroToArrowConfig config)
-      throws IOException {
-    Preconditions.checkNotNull(schema, "Avro schema object can not be null");
-    Preconditions.checkNotNull(decoder, "Avro decoder object can not be null");
-    Preconditions.checkNotNull(config, "config can not be null");
-
-    return AvroToArrowUtils.avroToArrowVectors(schema, decoder, config);
-  }
-
-  /**
-   * Fetch the data from {@link Decoder} and iteratively convert it to Arrow objects.
-   * @param schema avro schema
-   * @param decoder avro decoder
-   * @param config configuration of the conversion.
-   * @throws IOException on error
-   */
-  public static AvroToArrowVectorIterator avroToArrowIterator(
-      Schema schema,
-      Decoder decoder,
-      AvroToArrowConfig config) throws IOException {
-
-    Preconditions.checkNotNull(schema, "Avro schema object can not be null");
-    Preconditions.checkNotNull(decoder, "Avro decoder object can not be null");
-    Preconditions.checkNotNull(config, "config can not be null");
-
-    return AvroToArrowVectorIterator.create(decoder, schema, config);
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfig.java b/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfig.java
deleted file mode 100644
index 4f59ef3..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfig.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import java.util.Set;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.dictionary.DictionaryProvider;
-
-/**
- * This class configures the Avro-to-Arrow conversion process.
- */
-public class AvroToArrowConfig {
-
-  private final BufferAllocator allocator;
-  /**
-   * The maximum row count to read in each batch when converting data incrementally.
-   * The default value is 1024; -1 means read all data into one vector.
-   */
-  private final int targetBatchSize;
-
-  /**
-   * The dictionary provider used for enum types.
-   * If the Avro schema has an enum type, a dictionary will be created and this provider updated.
-   */
-  private final DictionaryProvider.MapDictionaryProvider provider;
-
-  /**
-   * The field names to skip when reading decoder values.
-   */
-  private final Set<String> skipFieldNames;
-
-  /**
-   * Instantiate an instance.
-   * @param allocator The memory allocator to construct the Arrow vectors with.
-   * @param targetBatchSize The maximum row count to read in each batch when converting data incrementally.
-   * @param provider The dictionary provider used for enum types; the adapter will update this provider.
-   * @param skipFieldNames Field names to skip.
-   */
-  AvroToArrowConfig(
-      BufferAllocator allocator,
-      int targetBatchSize,
-      DictionaryProvider.MapDictionaryProvider provider,
-      Set<String> skipFieldNames) {
-
-    Preconditions.checkArgument(targetBatchSize == AvroToArrowVectorIterator.NO_LIMIT_BATCH_SIZE ||
-        targetBatchSize > 0, "invalid targetBatchSize: %s", targetBatchSize);
-
-    this.allocator = allocator;
-    this.targetBatchSize = targetBatchSize;
-    this.provider = provider;
-    this.skipFieldNames = skipFieldNames;
-  }
-
-  public BufferAllocator getAllocator() {
-    return allocator;
-  }
-
-  public int getTargetBatchSize() {
-    return targetBatchSize;
-  }
-
-  public DictionaryProvider.MapDictionaryProvider getProvider() {
-    return provider;
-  }
-
-  public Set<String> getSkipFieldNames() {
-    return skipFieldNames;
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfigBuilder.java b/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfigBuilder.java
deleted file mode 100644
index 474c1eb..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowConfigBuilder.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.vector.dictionary.DictionaryProvider;
-
-/**
- * This class builds {@link AvroToArrowConfig}s.
- */
-public class AvroToArrowConfigBuilder {
-
-  private BufferAllocator allocator;
-
-  private int targetBatchSize;
-
-  private DictionaryProvider.MapDictionaryProvider provider;
-
-  private Set<String> skipFieldNames;
-
-  /**
-   * Default constructor for the {@link AvroToArrowConfigBuilder}.
-   */
-  public AvroToArrowConfigBuilder(BufferAllocator allocator) {
-    this.allocator = allocator;
-    this.targetBatchSize = AvroToArrowVectorIterator.DEFAULT_BATCH_SIZE;
-    this.provider = new DictionaryProvider.MapDictionaryProvider();
-    this.skipFieldNames = new HashSet<>();
-  }
-
-  public AvroToArrowConfigBuilder setTargetBatchSize(int targetBatchSize) {
-    this.targetBatchSize = targetBatchSize;
-    return this;
-  }
-
-  public AvroToArrowConfigBuilder setProvider(DictionaryProvider.MapDictionaryProvider provider) {
-    this.provider = provider;
-    return this;
-  }
-
-  public AvroToArrowConfigBuilder setSkipFieldNames(Set<String> skipFieldNames) {
-    this.skipFieldNames = skipFieldNames;
-    return this;
-  }
-
-  /**
-   * This builds the {@link AvroToArrowConfig} from the provided params.
-   */
-  public AvroToArrowConfig build() {
-    return new AvroToArrowConfig(
-        allocator,
-        targetBatchSize,
-        provider,
-        skipFieldNames);
-  }
-}
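
Taken together, the builder, the config, and the `AvroToArrow` entry point shown above compose as in the following sketch. This is illustrative only, not code from the repository: the file names are placeholders, the input is assumed to be a stream of raw Avro-encoded records (not an Avro container file), and `AvroToArrowVectorIterator` is assumed to behave as an `AutoCloseable` iterator over `VectorSchemaRoot` batches, as its name and the javadoc above suggest.

```java
import java.io.File;
import java.io.FileInputStream;
import org.apache.arrow.AvroToArrow;
import org.apache.arrow.AvroToArrowConfig;
import org.apache.arrow.AvroToArrowConfigBuilder;
import org.apache.arrow.AvroToArrowVectorIterator;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;

public class AvroToArrowExample {
  public static void main(String[] args) throws Exception {
    // Placeholder inputs: an Avro schema file and a stream of raw
    // Avro-encoded records matching that schema.
    Schema schema = new Schema.Parser().parse(new File("user.avsc"));
    try (BufferAllocator allocator = new RootAllocator();
         FileInputStream in = new FileInputStream("users.avro")) {
      BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(in, null);
      AvroToArrowConfig config = new AvroToArrowConfigBuilder(allocator)
          .setTargetBatchSize(1024) // the documented default batch size
          .build();
      // Assumption: the iterator yields VectorSchemaRoot batches and is
      // AutoCloseable; close each batch after use to release its buffers.
      try (AvroToArrowVectorIterator batches =
               AvroToArrow.avroToArrowIterator(schema, decoder, config)) {
        while (batches.hasNext()) {
          try (VectorSchemaRoot root = batches.next()) {
            System.out.println("batch rows: " + root.getRowCount());
          }
        }
      }
    }
  }
}
```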
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowUtils.java b/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowUtils.java
deleted file mode 100644
index 80293c8..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowUtils.java
+++ /dev/null
@@ -1,805 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static org.apache.arrow.vector.types.FloatingPointPrecision.DOUBLE;
-import static org.apache.arrow.vector.types.FloatingPointPrecision.SINGLE;
-
-import java.io.EOFException;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-import org.apache.arrow.consumers.AvroArraysConsumer;
-import org.apache.arrow.consumers.AvroBooleanConsumer;
-import org.apache.arrow.consumers.AvroBytesConsumer;
-import org.apache.arrow.consumers.AvroDoubleConsumer;
-import org.apache.arrow.consumers.AvroEnumConsumer;
-import org.apache.arrow.consumers.AvroFixedConsumer;
-import org.apache.arrow.consumers.AvroFloatConsumer;
-import org.apache.arrow.consumers.AvroIntConsumer;
-import org.apache.arrow.consumers.AvroLongConsumer;
-import org.apache.arrow.consumers.AvroMapConsumer;
-import org.apache.arrow.consumers.AvroNullConsumer;
-import org.apache.arrow.consumers.AvroStringConsumer;
-import org.apache.arrow.consumers.AvroStructConsumer;
-import org.apache.arrow.consumers.AvroUnionsConsumer;
-import org.apache.arrow.consumers.CompositeAvroConsumer;
-import org.apache.arrow.consumers.Consumer;
-import org.apache.arrow.consumers.SkipConsumer;
-import org.apache.arrow.consumers.SkipFunction;
-import org.apache.arrow.consumers.logical.AvroDateConsumer;
-import org.apache.arrow.consumers.logical.AvroDecimalConsumer;
-import org.apache.arrow.consumers.logical.AvroTimeMicroConsumer;
-import org.apache.arrow.consumers.logical.AvroTimeMillisConsumer;
-import org.apache.arrow.consumers.logical.AvroTimestampMicrosConsumer;
-import org.apache.arrow.consumers.logical.AvroTimestampMillisConsumer;
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.BaseIntVector;
-import org.apache.arrow.vector.BigIntVector;
-import org.apache.arrow.vector.BitVector;
-import org.apache.arrow.vector.DateDayVector;
-import org.apache.arrow.vector.DecimalVector;
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.FixedSizeBinaryVector;
-import org.apache.arrow.vector.Float4Vector;
-import org.apache.arrow.vector.Float8Vector;
-import org.apache.arrow.vector.IntVector;
-import org.apache.arrow.vector.NullVector;
-import org.apache.arrow.vector.TimeMicroVector;
-import org.apache.arrow.vector.TimeMilliVector;
-import org.apache.arrow.vector.TimeStampMicroVector;
-import org.apache.arrow.vector.TimeStampMilliVector;
-import org.apache.arrow.vector.VarBinaryVector;
-import org.apache.arrow.vector.VarCharVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.arrow.vector.complex.MapVector;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.arrow.vector.complex.UnionVector;
-import org.apache.arrow.vector.dictionary.Dictionary;
-import org.apache.arrow.vector.dictionary.DictionaryEncoder;
-import org.apache.arrow.vector.dictionary.DictionaryProvider;
-import org.apache.arrow.vector.types.DateUnit;
-import org.apache.arrow.vector.types.TimeUnit;
-import org.apache.arrow.vector.types.Types;
-import org.apache.arrow.vector.types.UnionMode;
-import org.apache.arrow.vector.types.pojo.ArrowType;
-import org.apache.arrow.vector.types.pojo.DictionaryEncoding;
-import org.apache.arrow.vector.types.pojo.Field;
-import org.apache.arrow.vector.types.pojo.FieldType;
-import org.apache.arrow.vector.util.JsonStringArrayList;
-import org.apache.arrow.vector.util.ValueVectorUtility;
-import org.apache.avro.LogicalType;
-import org.apache.avro.LogicalTypes;
-import org.apache.avro.Schema;
-import org.apache.avro.Schema.Type;
-import org.apache.avro.io.Decoder;
-
-/**
- * Class that does most of the work to convert Avro data into Arrow columnar format Vector objects.
- */
-public class AvroToArrowUtils {
-
-  /**
-   * Creates a {@link Consumer} from the {@link Schema}.
-   *
-   * <p>This method currently performs the following type mapping from Avro data types to the corresponding Arrow data types.
-   *
-   * <ul>
-   *   <li>STRING --> ArrowType.Utf8</li>
-   *   <li>INT --> ArrowType.Int(32, signed)</li>
-   *   <li>LONG --> ArrowType.Int(64, signed)</li>
-   *   <li>FLOAT --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)</li>
-   *   <li>DOUBLE --> ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)</li>
-   *   <li>BOOLEAN --> ArrowType.Bool</li>
-   *   <li>BYTES --> ArrowType.Binary</li>
-   *   <li>ARRAY --> ArrowType.List</li>
-   *   <li>MAP --> ArrowType.Map</li>
-   *   <li>FIXED --> ArrowType.FixedSizeBinary</li>
-   *   <li>RECORD --> ArrowType.Struct</li>
-   *   <li>UNION --> ArrowType.Union</li>
-   *   <li>ENUM --> ArrowType.Int</li>
-   *   <li>DECIMAL --> ArrowType.Decimal</li>
-   *   <li>Date --> ArrowType.Date(DateUnit.DAY)</li>
-   *   <li>TimeMillis --> ArrowType.Time(TimeUnit.MILLISECOND, 32)</li>
-   *   <li>TimeMicros --> ArrowType.Time(TimeUnit.MICROSECOND, 64)</li>
-   *   <li>TimestampMillis --> ArrowType.Timestamp(TimeUnit.MILLISECOND, null)</li>
-   *   <li>TimestampMicros --> ArrowType.Timestamp(TimeUnit.MICROSECOND, null)</li>
-   * </ul>
-   */
-
-  private static Consumer createConsumer(Schema schema, String name, AvroToArrowConfig config) {
-    return createConsumer(schema, name, false, config, null);
-  }
-
-  private static Consumer createConsumer(Schema schema, String name, AvroToArrowConfig config, FieldVector vector) {
-    return createConsumer(schema, name, false, config, vector);
-  }
-
-  /**
-   * Create a consumer with the given Avro schema.
-   *
-   * @param schema avro schema
-   * @param name arrow field name
-   * @param consumerVector vector to keep in the consumer; if null, a new vector will be created from the field.
-   * @return consumer
-   */
-  private static Consumer createConsumer(
-      Schema schema,
-      String name,
-      boolean nullable,
-      AvroToArrowConfig config,
-      FieldVector consumerVector) {
-
-    Preconditions.checkNotNull(schema, "Avro schema object can't be null");
-    Preconditions.checkNotNull(config, "Config can't be null");
-
-    final BufferAllocator allocator = config.getAllocator();
-
-    final Type type = schema.getType();
-    final LogicalType logicalType = schema.getLogicalType();
-
-    final ArrowType arrowType;
-    final FieldType fieldType;
-    final FieldVector vector;
-    final Consumer consumer;
-
-    switch (type) {
-      case UNION:
-        consumer = createUnionConsumer(schema, name, config, consumerVector);
-        break;
-      case ARRAY:
-        consumer = createArrayConsumer(schema, name, config, consumerVector);
-        break;
-      case MAP:
-        consumer = createMapConsumer(schema, name, config, consumerVector);
-        break;
-      case RECORD:
-        consumer = createStructConsumer(schema, name, config, consumerVector);
-        break;
-      case ENUM:
-        consumer = createEnumConsumer(schema, name, config, consumerVector);
-        break;
-      case STRING:
-        arrowType = new ArrowType.Utf8();
-        fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-        vector = createVector(consumerVector, fieldType, name, allocator);
-        consumer = new AvroStringConsumer((VarCharVector) vector);
-        break;
-      case FIXED:
-        Map<String, String> extProps = createExternalProps(schema);
-        if (logicalType instanceof LogicalTypes.Decimal) {
-          arrowType = createDecimalArrowType((LogicalTypes.Decimal) logicalType);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema, extProps));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroDecimalConsumer.FixedDecimalConsumer((DecimalVector) vector, schema.getFixedSize());
-        } else {
-          arrowType = new ArrowType.FixedSizeBinary(schema.getFixedSize());
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema, extProps));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroFixedConsumer((FixedSizeBinaryVector) vector, schema.getFixedSize());
-        }
-        break;
-      case INT:
-        if (logicalType instanceof LogicalTypes.Date) {
-          arrowType = new ArrowType.Date(DateUnit.DAY);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroDateConsumer((DateDayVector) vector);
-        } else if (logicalType instanceof LogicalTypes.TimeMillis) {
-          arrowType = new ArrowType.Time(TimeUnit.MILLISECOND, 32);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroTimeMillisConsumer((TimeMilliVector) vector);
-        } else {
-          arrowType = new ArrowType.Int(32, /*signed=*/true);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroIntConsumer((IntVector) vector);
-        }
-        break;
-      case BOOLEAN:
-        arrowType = new ArrowType.Bool();
-        fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-        vector = createVector(consumerVector, fieldType, name, allocator);
-        consumer = new AvroBooleanConsumer((BitVector) vector);
-        break;
-      case LONG:
-        if (logicalType instanceof LogicalTypes.TimeMicros) {
-          arrowType = new ArrowType.Time(TimeUnit.MICROSECOND, 64);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroTimeMicroConsumer((TimeMicroVector) vector);
-        } else if (logicalType instanceof LogicalTypes.TimestampMillis) {
-          arrowType = new ArrowType.Timestamp(TimeUnit.MILLISECOND, null);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroTimestampMillisConsumer((TimeStampMilliVector) vector);
-        } else if (logicalType instanceof LogicalTypes.TimestampMicros) {
-          arrowType = new ArrowType.Timestamp(TimeUnit.MICROSECOND, null);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroTimestampMicrosConsumer((TimeStampMicroVector) vector);
-        } else {
-          arrowType = new ArrowType.Int(64, /*signed=*/true);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroLongConsumer((BigIntVector) vector);
-        }
-        break;
-      case FLOAT:
-        arrowType = new ArrowType.FloatingPoint(SINGLE);
-        fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-        vector = createVector(consumerVector, fieldType, name, allocator);
-        consumer = new AvroFloatConsumer((Float4Vector) vector);
-        break;
-      case DOUBLE:
-        arrowType = new ArrowType.FloatingPoint(DOUBLE);
-        fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-        vector = createVector(consumerVector, fieldType, name, allocator);
-        consumer = new AvroDoubleConsumer((Float8Vector) vector);
-        break;
-      case BYTES:
-        if (logicalType instanceof LogicalTypes.Decimal) {
-          arrowType = createDecimalArrowType((LogicalTypes.Decimal) logicalType);
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroDecimalConsumer.BytesDecimalConsumer((DecimalVector) vector);
-        } else {
-          arrowType = new ArrowType.Binary();
-          fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-          vector = createVector(consumerVector, fieldType, name, allocator);
-          consumer = new AvroBytesConsumer((VarBinaryVector) vector);
-        }
-        break;
-      case NULL:
-        arrowType = new ArrowType.Null();
-        fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
-        vector = fieldType.createNewSingleVector(name, allocator, /*schemaCallback=*/null);
-        consumer = new AvroNullConsumer((NullVector) vector);
-        break;
-      default:
-        // not reachable for any known avro type
-        throw new UnsupportedOperationException(
-            String.format("Can't convert avro type %s to arrow type.", type.getName()));
-    }
-    return consumer;
-  }
-
-  private static ArrowType createDecimalArrowType(LogicalTypes.Decimal logicalType) {
-    final int scale = logicalType.getScale();
-    final int precision = logicalType.getPrecision();
-    Preconditions.checkArgument(precision > 0 && precision <= 38,
-        "Precision must be in the range 1 to 38.");
-    Preconditions.checkArgument(scale >= 0 && scale <= 38,
-        "Scale must be in the range 0 to 38.");
-    Preconditions.checkArgument(scale <= precision,
-        "Invalid decimal scale: %s (greater than precision: %s)", scale, precision);
-
-    return new ArrowType.Decimal(precision, scale, 128);
-  }
-
-  private static Consumer createSkipConsumer(Schema schema) {
-
-    SkipFunction skipFunction;
-    Type type = schema.getType();
-
-    switch (type) {
-      case UNION:
-        List<Consumer> unionDelegates = schema.getTypes().stream().map(s ->
-            createSkipConsumer(s)).collect(Collectors.toList());
-        skipFunction = decoder -> unionDelegates.get(decoder.readInt()).consume(decoder);
-
-        break;
-      case ARRAY:
-        Consumer elementDelegate = createSkipConsumer(schema.getElementType());
-        skipFunction = decoder -> {
-          for (long i = decoder.skipArray(); i != 0; i = decoder.skipArray()) {
-            for (long j = 0; j < i; j++) {
-              elementDelegate.consume(decoder);
-            }
-          }
-        };
-        break;
-      case MAP:
-        Consumer valueDelegate = createSkipConsumer(schema.getValueType());
-        skipFunction = decoder -> {
-          for (long i = decoder.skipMap(); i != 0; i = decoder.skipMap()) {
-            for (long j = 0; j < i; j++) {
-              decoder.skipString(); // Discard key
-              valueDelegate.consume(decoder);
-            }
-          }
-        };
-        break;
-      case RECORD:
-        List<Consumer> delegates = schema.getFields().stream().map(field ->
-            createSkipConsumer(field.schema())).collect(Collectors.toList());
-
-        skipFunction = decoder -> {
-          for (Consumer consumer : delegates) {
-            consumer.consume(decoder);
-          }
-        };
-
-        break;
-      case ENUM:
-        skipFunction = decoder -> decoder.readEnum();
-        break;
-      case STRING:
-        skipFunction = decoder -> decoder.skipString();
-        break;
-      case FIXED:
-        skipFunction = decoder -> decoder.skipFixed(schema.getFixedSize());
-        break;
-      case INT:
-        skipFunction = decoder -> decoder.readInt();
-        break;
-      case BOOLEAN:
-        skipFunction = decoder -> decoder.skipFixed(1);
-        break;
-      case LONG:
-        skipFunction = decoder -> decoder.readLong();
-        break;
-      case FLOAT:
-        skipFunction = decoder -> decoder.readFloat();
-        break;
-      case DOUBLE:
-        skipFunction = decoder -> decoder.readDouble();
-        break;
-      case BYTES:
-        skipFunction = decoder -> decoder.skipBytes();
-        break;
-      case NULL:
-        skipFunction = decoder -> { };
-        break;
-      default:
-        // not reachable for any known avro type
-        throw new UnsupportedOperationException("Invalid avro type: " + type.getName());
-    }
-
-    return new SkipConsumer(skipFunction);
-  }
-
-  static CompositeAvroConsumer createCompositeConsumer(
-      Schema schema, AvroToArrowConfig config) {
-
-    List<Consumer> consumers = new ArrayList<>();
-    final Set<String> skipFieldNames = config.getSkipFieldNames();
-
-    Schema.Type type = schema.getType();
-    if (type == Type.RECORD) {
-      for (Schema.Field field : schema.getFields()) {
-        if (skipFieldNames.contains(field.name())) {
-          consumers.add(createSkipConsumer(field.schema()));
-        } else {
-          Consumer consumer = createConsumer(field.schema(), field.name(), config);
-          consumers.add(consumer);
-        }
-      }
-    } else {
-      Consumer consumer = createConsumer(schema, "", config);
-      consumers.add(consumer);
-    }
-
-    return new CompositeAvroConsumer(consumers);
-  }
-
-  private static FieldVector createVector(FieldVector consumerVector, FieldType fieldType,
-      String name, BufferAllocator allocator) {
-    return consumerVector != null ? consumerVector : fieldType.createNewSingleVector(name, allocator, null);
-  }
-
-  private static String getDefaultFieldName(ArrowType type) {
-    Types.MinorType minorType = Types.getMinorTypeForArrowType(type);
-    return minorType.name().toLowerCase();
-  }
-
-  private static Field avroSchemaToField(Schema schema, String name, AvroToArrowConfig config) {
-    return avroSchemaToField(schema, name, config, null);
-  }
-
-  private static Field avroSchemaToField(
-      Schema schema,
-      String name,
-      AvroToArrowConfig config,
-      Map<String, String> externalProps) {
-
-    final Type type = schema.getType();
-    final LogicalType logicalType = schema.getLogicalType();
-    final List<Field> children = new ArrayList<>();
-    final FieldType fieldType;
-
-    switch (type) {
-      case UNION:
-        for (int i = 0; i < schema.getTypes().size(); i++) {
-          Schema childSchema = schema.getTypes().get(i);
-          // Union child vector should use default name
-          children.add(avroSchemaToField(childSchema, null, config));
-        }
-        fieldType = createFieldType(new ArrowType.Union(UnionMode.Sparse, null), schema, externalProps);
-        break;
-      case ARRAY:
-        Schema elementSchema = schema.getElementType();
-        children.add(avroSchemaToField(elementSchema, elementSchema.getName(), config));
-        fieldType = createFieldType(new ArrowType.List(), schema, externalProps);
-        break;
-      case MAP:
-        // MapVector internal struct field and key field should be non-nullable
-        FieldType keyFieldType = new FieldType(/*nullable=*/false, new ArrowType.Utf8(), /*dictionary=*/null);
-        Field keyField = new Field("key", keyFieldType, /*children=*/null);
-        Field valueField = avroSchemaToField(schema.getValueType(), "value", config);
-
-        FieldType structFieldType = new FieldType(false, new ArrowType.Struct(), /*dictionary=*/null);
-        Field structField = new Field("internal", structFieldType, Arrays.asList(keyField, valueField));
-        children.add(structField);
-        fieldType = createFieldType(new ArrowType.Map(/*keySorted=*/false), schema, externalProps);
-        break;
-      case RECORD:
-        final Set<String> skipFieldNames = config.getSkipFieldNames();
-        for (int i = 0; i < schema.getFields().size(); i++) {
-          final Schema.Field field = schema.getFields().get(i);
-          Schema childSchema = field.schema();
-          String fullChildName = String.format("%s.%s", name, field.name());
-          if (!skipFieldNames.contains(fullChildName)) {
-            final Map<String, String> extProps = new HashMap<>();
-            String doc = field.doc();
-            Set<String> aliases = field.aliases();
-            if (doc != null) {
-              extProps.put("doc", doc);
-            }
-            if (aliases != null) {
-              extProps.put("aliases", convertAliases(aliases));
-            }
-            children.add(avroSchemaToField(childSchema, fullChildName, config, extProps));
-          }
-        }
-        fieldType = createFieldType(new ArrowType.Struct(), schema, externalProps);
-        break;
-      case ENUM:
-        DictionaryProvider.MapDictionaryProvider provider = config.getProvider();
-        int current = provider.getDictionaryIds().size();
-        int enumCount = schema.getEnumSymbols().size();
-        ArrowType.Int indexType = DictionaryEncoder.getIndexType(enumCount);
-
-        fieldType = createFieldType(indexType, schema, externalProps,
-            new DictionaryEncoding(current, /*ordered=*/false, /*indexType=*/indexType));
-        break;
-      case STRING:
-        fieldType = createFieldType(new ArrowType.Utf8(), schema, externalProps);
-        break;
-      case FIXED:
-        final ArrowType fixedArrowType;
-        if (logicalType instanceof LogicalTypes.Decimal) {
-          fixedArrowType = createDecimalArrowType((LogicalTypes.Decimal) logicalType);
-        } else {
-          fixedArrowType = new ArrowType.FixedSizeBinary(schema.getFixedSize());
-        }
-        fieldType = createFieldType(fixedArrowType, schema, externalProps);
-        break;
-      case INT:
-        final ArrowType intArrowType;
-        if (logicalType instanceof LogicalTypes.Date) {
-          intArrowType = new ArrowType.Date(DateUnit.DAY);
-        } else if (logicalType instanceof LogicalTypes.TimeMillis) {
-          intArrowType = new ArrowType.Time(TimeUnit.MILLISECOND, 32);
-        } else {
-          intArrowType = new ArrowType.Int(32, /*signed=*/true);
-        }
-        fieldType = createFieldType(intArrowType, schema, externalProps);
-        break;
-      case BOOLEAN:
-        fieldType = createFieldType(new ArrowType.Bool(), schema, externalProps);
-        break;
-      case LONG:
-        final ArrowType longArrowType;
-        if (logicalType instanceof LogicalTypes.TimeMicros) {
-          longArrowType = new ArrowType.Time(TimeUnit.MICROSECOND, 64);
-        } else if (logicalType instanceof LogicalTypes.TimestampMillis) {
-          longArrowType = new ArrowType.Timestamp(TimeUnit.MILLISECOND, null);
-        } else if (logicalType instanceof LogicalTypes.TimestampMicros) {
-          longArrowType = new ArrowType.Timestamp(TimeUnit.MICROSECOND, null);
-        } else {
-          longArrowType = new ArrowType.Int(64, /*signed=*/true);
-        }
-        fieldType = createFieldType(longArrowType, schema, externalProps);
-        break;
-      case FLOAT:
-        fieldType = createFieldType(new ArrowType.FloatingPoint(SINGLE), schema, externalProps);
-        break;
-      case DOUBLE:
-        fieldType = createFieldType(new ArrowType.FloatingPoint(DOUBLE), schema, externalProps);
-        break;
-      case BYTES:
-        final ArrowType bytesArrowType;
-        if (logicalType instanceof LogicalTypes.Decimal) {
-          bytesArrowType = createDecimalArrowType((LogicalTypes.Decimal) logicalType);
-        } else {
-          bytesArrowType = new ArrowType.Binary();
-        }
-        fieldType = createFieldType(bytesArrowType, schema, externalProps);
-        break;
-      case NULL:
-        fieldType = createFieldType(ArrowType.Null.INSTANCE, schema, externalProps);
-        break;
-      default:
-        // not reachable for any known avro type
-        throw new UnsupportedOperationException("Can't convert avro type " + type.getName() + " to arrow type.");
-    }
-
-    if (name == null) {
-      name = getDefaultFieldName(fieldType.getType());
-    }
-    return new Field(name, fieldType, children.size() == 0 ? null : children);
-  }
-
-  private static Consumer createArrayConsumer(Schema schema, String name, AvroToArrowConfig config,
-      FieldVector consumerVector) {
-
-    ListVector listVector;
-    if (consumerVector == null) {
-      final Field field = avroSchemaToField(schema, name, config);
-      listVector = (ListVector) field.createVector(config.getAllocator());
-    } else {
-      listVector = (ListVector) consumerVector;
-    }
-
-    FieldVector dataVector = listVector.getDataVector();
-
-    // create delegate
-    Schema childSchema = schema.getElementType();
-    Consumer delegate = createConsumer(childSchema, childSchema.getName(), config, dataVector);
-
-    return new AvroArraysConsumer(listVector, delegate);
-  }
-
-  private static Consumer createStructConsumer(Schema schema, String name, AvroToArrowConfig config,
-      FieldVector consumerVector) {
-
-    final Set<String> skipFieldNames = config.getSkipFieldNames();
-
-    StructVector structVector;
-    if (consumerVector == null) {
-      final Field field = avroSchemaToField(schema, name, config, createExternalProps(schema));
-      structVector = (StructVector) field.createVector(config.getAllocator());
-    } else {
-      structVector = (StructVector) consumerVector;
-    }
-
-    Consumer[] delegates = new Consumer[schema.getFields().size()];
-    int vectorIndex = 0;
-    for (int i = 0; i < schema.getFields().size(); i++) {
-      Schema.Field childField = schema.getFields().get(i);
-      Consumer delegate;
-      // use the full name to distinguish fields that share a name between parent and child fields.
-      final String fullChildName = String.format("%s.%s", name, childField.name());
-      if (skipFieldNames.contains(fullChildName)) {
-        delegate = createSkipConsumer(childField.schema());
-      } else {
-        delegate = createConsumer(childField.schema(), fullChildName, config,
-            structVector.getChildrenFromFields().get(vectorIndex++));
-      }
-
-      delegates[i] = delegate;
-    }
-
-    return new AvroStructConsumer(structVector, delegates);
-  }
-
-  private static Consumer createEnumConsumer(Schema schema, String name, AvroToArrowConfig config,
-      FieldVector consumerVector) {
-
-    BaseIntVector indexVector;
-    if (consumerVector == null) {
-      final Field field = avroSchemaToField(schema, name, config, createExternalProps(schema));
-      indexVector = (BaseIntVector) field.createVector(config.getAllocator());
-    } else {
-      indexVector = (BaseIntVector) consumerVector;
-    }
-
-    final int valueCount = schema.getEnumSymbols().size();
-    VarCharVector dictVector = new VarCharVector(name, config.getAllocator());
-    dictVector.allocateNewSafe();
-    dictVector.setValueCount(valueCount);
-    for (int i = 0; i < valueCount; i++) {
-      dictVector.set(i, schema.getEnumSymbols().get(i).getBytes(StandardCharsets.UTF_8));
-    }
-    Dictionary dictionary =
-        new Dictionary(dictVector, indexVector.getField().getDictionary());
-    config.getProvider().put(dictionary);
-
-    return new AvroEnumConsumer(indexVector);
-  }
-
-  private static Consumer createMapConsumer(Schema schema, String name, AvroToArrowConfig config,
-      FieldVector consumerVector) {
-
-    MapVector mapVector;
-    if (consumerVector == null) {
-      final Field field = avroSchemaToField(schema, name, config);
-      mapVector = (MapVector) field.createVector(config.getAllocator());
-    } else {
-      mapVector = (MapVector) consumerVector;
-    }
-
-    // create delegate struct consumer
-    StructVector structVector = (StructVector) mapVector.getDataVector();
-
-    // keys in avro map are always assumed to be strings.
-    Consumer keyConsumer = new AvroStringConsumer(
-        (VarCharVector) structVector.getChildrenFromFields().get(0));
-    Consumer valueConsumer = createConsumer(schema.getValueType(), schema.getValueType().getName(),
-        config, structVector.getChildrenFromFields().get(1));
-
-    AvroStructConsumer internalConsumer =
-        new AvroStructConsumer(structVector, new Consumer[] {keyConsumer, valueConsumer});
-
-    return new AvroMapConsumer(mapVector, internalConsumer);
-  }
-
-  private static Consumer createUnionConsumer(Schema schema, String name, AvroToArrowConfig config,
-      FieldVector consumerVector) {
-    final int size = schema.getTypes().size();
-
-    final boolean nullable = schema.getTypes().stream().anyMatch(t -> t.getType() == Type.NULL);
-
-    UnionVector unionVector;
-    if (consumerVector == null) {
-      final Field field = avroSchemaToField(schema, name, config);
-      unionVector = (UnionVector) field.createVector(config.getAllocator());
-    } else {
-      unionVector = (UnionVector) consumerVector;
-    }
-
-    List<FieldVector> childVectors = unionVector.getChildrenFromFields();
-
-    Consumer[] delegates = new Consumer[size];
-    Types.MinorType[] types = new Types.MinorType[size];
-
-    for (int i = 0; i < size; i++) {
-      FieldVector child = childVectors.get(i);
-      Schema subSchema = schema.getTypes().get(i);
-      Consumer delegate = createConsumer(subSchema, subSchema.getName(), nullable, config, child);
-      delegates[i] = delegate;
-      types[i] = child.getMinorType();
-    }
-    return new AvroUnionsConsumer(unionVector, delegates, types);
-  }
-
-  /**
-   * Read data from {@link Decoder} and generate a {@link VectorSchemaRoot}.
-   * @param schema avro schema
-   * @param decoder avro decoder to read data from
-   * @param config conversion configuration
-   */
-  static VectorSchemaRoot avroToArrowVectors(
-      Schema schema,
-      Decoder decoder,
-      AvroToArrowConfig config)
-      throws IOException {
-
-    List<FieldVector> vectors = new ArrayList<>();
-    List<Consumer> consumers = new ArrayList<>();
-    final Set<String> skipFieldNames = config.getSkipFieldNames();
-
-    Schema.Type type = schema.getType();
-    if (type == Type.RECORD) {
-      for (Schema.Field field : schema.getFields()) {
-        if (skipFieldNames.contains(field.name())) {
-          consumers.add(createSkipConsumer(field.schema()));
-        } else {
-          Consumer consumer = createConsumer(field.schema(), field.name(), config);
-          consumers.add(consumer);
-          vectors.add(consumer.getVector());
-        }
-      }
-    } else {
-      Consumer consumer = createConsumer(schema, "", config);
-      consumers.add(consumer);
-      vectors.add(consumer.getVector());
-    }
-
-    long validConsumerCount = consumers.stream().filter(c -> !c.skippable()).count();
-    Preconditions.checkArgument(vectors.size() == validConsumerCount,
-        "vectors size not equals consumers size.");
-
-    List<Field> fields = vectors.stream().map(t -> t.getField()).collect(Collectors.toList());
-
-    VectorSchemaRoot root = new VectorSchemaRoot(fields, vectors, 0);
-
-    CompositeAvroConsumer compositeConsumer = new CompositeAvroConsumer(consumers);
-
-    int valueCount = 0;
-    try {
-      while (true) {
-        ValueVectorUtility.ensureCapacity(root, valueCount + 1);
-        compositeConsumer.consume(decoder);
-        valueCount++;
-      }
-    } catch (EOFException eof) {
-      // reached the end of the decoder stream.
-      root.setRowCount(valueCount);
-    } catch (Exception e) {
-      compositeConsumer.close();
-      throw new UnsupportedOperationException("Error occurred while consuming data.", e);
-    }
-
-    return root;
-  }
-
-  private static Map<String, String> getMetaData(Schema schema) {
-    Map<String, String> metadata = new HashMap<>();
-    schema.getObjectProps().forEach((k, v) -> metadata.put(k, v.toString()));
-    return metadata;
-  }
-
-  private static Map<String, String> getMetaData(Schema schema, Map<String, String> externalProps) {
-    Map<String, String> metadata = getMetaData(schema);
-    if (externalProps != null) {
-      metadata.putAll(externalProps);
-    }
-    return metadata;
-  }
-
-  /**
-   * Parse avro attributes and convert them to metadata.
-   */
-  private static Map<String, String> createExternalProps(Schema schema) {
-    final Map<String, String> extProps = new HashMap<>();
-    String doc = schema.getDoc();
-    Set<String> aliases = schema.getAliases();
-    if (doc != null) {
-      extProps.put("doc", doc);
-    }
-    if (aliases != null) {
-      extProps.put("aliases", convertAliases(aliases));
-    }
-    return extProps;
-  }
-
-  private static FieldType createFieldType(ArrowType arrowType, Schema schema, Map<String, String> externalProps) {
-    return createFieldType(arrowType, schema, externalProps, /*dictionary=*/null);
-  }
-
-  private static FieldType createFieldType(
-      ArrowType arrowType,
-      Schema schema,
-      Map<String, String> externalProps,
-      DictionaryEncoding dictionary) {
-
-    return new FieldType(/*nullable=*/false, arrowType, dictionary,
-        getMetaData(schema, externalProps));
-  }
-
-  private static String convertAliases(Set<String> aliases) {
-    JsonStringArrayList<String> jsonList = new JsonStringArrayList<>();
-    jsonList.addAll(aliases);
-    return jsonList.toString();
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowVectorIterator.java b/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowVectorIterator.java
deleted file mode 100644
index 1faa759..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrowVectorIterator.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import java.io.EOFException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import org.apache.arrow.consumers.CompositeAvroConsumer;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.types.pojo.Field;
-import org.apache.arrow.vector.util.ValueVectorUtility;
-import org.apache.avro.Schema;
-import org.apache.avro.io.Decoder;
-
-/**
- * VectorSchemaRoot iterator for partially converting avro data.
- */
-public class AvroToArrowVectorIterator implements Iterator<VectorSchemaRoot>, AutoCloseable {
-
-  public static final int NO_LIMIT_BATCH_SIZE = -1;
-  public static final int DEFAULT_BATCH_SIZE = 1024;
-
-  private final Decoder decoder;
-  private final Schema schema;
-
-  private final AvroToArrowConfig config;
-
-  private CompositeAvroConsumer compositeConsumer;
-
-  private org.apache.arrow.vector.types.pojo.Schema rootSchema;
-
-  private VectorSchemaRoot nextBatch;
-
-  private final int targetBatchSize;
-
-  /**
-   * Construct an instance.
-   */
-  private AvroToArrowVectorIterator(
-      Decoder decoder,
-      Schema schema,
-      AvroToArrowConfig config) {
-
-    this.decoder = decoder;
-    this.schema = schema;
-    this.config = config;
-    this.targetBatchSize = config.getTargetBatchSize();
-  }
-
-  /**
-   * Create an AvroToArrowVectorIterator to partially convert data.
-   */
-  public static AvroToArrowVectorIterator create(
-      Decoder decoder,
-      Schema schema,
-      AvroToArrowConfig config) {
-
-    AvroToArrowVectorIterator iterator = new AvroToArrowVectorIterator(decoder, schema, config);
-    try {
-      iterator.initialize();
-      return iterator;
-    } catch (Exception e) {
-      iterator.close();
-      throw new RuntimeException("Error occurs while creating iterator.", e);
-    }
-  }
-
-  private void initialize() {
-    // create consumers
-    compositeConsumer = AvroToArrowUtils.createCompositeConsumer(schema, config);
-    List<FieldVector> vectors = new ArrayList<>();
-    compositeConsumer.getConsumers().forEach(c -> vectors.add(c.getVector()));
-    List<Field> fields = vectors.stream().map(t -> t.getField()).collect(Collectors.toList());
-    VectorSchemaRoot root = new VectorSchemaRoot(fields, vectors, 0);
-    rootSchema = root.getSchema();
-
-    load(root);
-  }
-
-  private void consumeData(VectorSchemaRoot root) {
-    int readRowCount = 0;
-    try {
-      if (targetBatchSize == NO_LIMIT_BATCH_SIZE) {
-        while (true) {
-          ValueVectorUtility.ensureCapacity(root, readRowCount + 1);
-          compositeConsumer.consume(decoder);
-          readRowCount++;
-        }
-      } else {
-        while (readRowCount < targetBatchSize) {
-          compositeConsumer.consume(decoder);
-          readRowCount++;
-        }
-      }
-
-      root.setRowCount(readRowCount);
-    } catch (EOFException eof) {
-      // reached the end of the decoder stream.
-      root.setRowCount(readRowCount);
-    } catch (Exception e) {
-      compositeConsumer.close();
-      throw new RuntimeException("Error occurs while consuming data.", e);
-    }
-  }
-
-  // Loads the next batch into nextBatch, or sets it to null when no more rows are available.
-  private void load(VectorSchemaRoot root) {
-    final int targetBatchSize = config.getTargetBatchSize();
-    if (targetBatchSize != NO_LIMIT_BATCH_SIZE) {
-      ValueVectorUtility.preAllocate(root, targetBatchSize);
-    }
-
-    long validConsumerCount = compositeConsumer.getConsumers().stream().filter(c ->
-        !c.skippable()).count();
-    Preconditions.checkArgument(root.getFieldVectors().size() == validConsumerCount,
-        "Schema root vectors size not equals to consumers size.");
-
-    compositeConsumer.resetConsumerVectors(root);
-
-    // consume data
-    consumeData(root);
-
-    if (root.getRowCount() == 0) {
-      root.close();
-      nextBatch = null;
-    } else {
-      nextBatch = root;
-    }
-  }
-
-  @Override
-  public boolean hasNext() {
-    return nextBatch != null;
-  }
-
-  /**
-   * Gets the next vector schema root. The caller is responsible for freeing its resources.
-   */
-  public VectorSchemaRoot next() {
-    Preconditions.checkArgument(hasNext());
-    VectorSchemaRoot returned = nextBatch;
-    try {
-      load(VectorSchemaRoot.create(rootSchema, config.getAllocator()));
-    } catch (Exception e) {
-      returned.close();
-      throw new RuntimeException("Error occurs while getting next schema root.", e);
-    }
-    return returned;
-  }
-
-  /**
-   * Clean up resources.
-   */
-  public void close() {
-    if (nextBatch != null) {
-      nextBatch.close();
-    }
-    compositeConsumer.close();
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroArraysConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroArraysConsumer.java
deleted file mode 100644
index b9d0f84..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroArraysConsumer.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes array type values from an avro decoder
- * and writes the data to a {@link ListVector}.
- */
-public class AvroArraysConsumer extends BaseAvroConsumer<ListVector> {
-
-  private final Consumer delegate;
-
-  /**
- * Instantiate an AvroArraysConsumer.
-   */
-  public AvroArraysConsumer(ListVector vector, Consumer delegate) {
-    super(vector);
-    this.delegate = delegate;
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-
-    vector.startNewValue(currentIndex);
-    long totalCount = 0;
-    for (long count = decoder.readArrayStart(); count != 0; count = decoder.arrayNext()) {
-      totalCount += count;
-      ensureInnerVectorCapacity(totalCount);
-      for (int element = 0; element < count; element++) {
-        delegate.consume(decoder);
-      }
-    }
-    vector.endValue(currentIndex, (int) totalCount);
-    currentIndex++;
-  }
-
-  @Override
-  public void close() throws Exception {
-    super.close();
-    delegate.close();
-  }
-
-  @Override
-  public boolean resetValueVector(ListVector vector) {
-    this.delegate.resetValueVector(vector.getDataVector());
-    return super.resetValueVector(vector);
-  }
-
-  void ensureInnerVectorCapacity(long targetCapacity) {
-    while (vector.getDataVector().getValueCapacity() < targetCapacity) {
-      vector.getDataVector().reAlloc();
-    }
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBooleanConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBooleanConsumer.java
deleted file mode 100644
index 4ca5f24..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBooleanConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.BitVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes boolean type values from an avro decoder
- * and writes the data to a {@link BitVector}.
- */
-public class AvroBooleanConsumer extends BaseAvroConsumer<BitVector> {
-
-  /**
- * Instantiate an AvroBooleanConsumer.
-   */
-  public AvroBooleanConsumer(BitVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex, decoder.readBoolean() ? 1 : 0);
-    currentIndex++;
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBytesConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBytesConsumer.java
deleted file mode 100644
index eede68e..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroBytesConsumer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import org.apache.arrow.vector.VarBinaryVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes bytes type values from an avro decoder
- * and writes the data to a {@link VarBinaryVector}.
- */
-public class AvroBytesConsumer extends BaseAvroConsumer<VarBinaryVector> {
-
-  private ByteBuffer cacheBuffer;
-
-  /**
- * Instantiate an AvroBytesConsumer.
-   */
-  public AvroBytesConsumer(VarBinaryVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    // cacheBuffer starts as null and is created on the first consume;
-    // if its capacity is smaller than the value to read, the decoder allocates a new buffer.
-    cacheBuffer = decoder.readBytes(cacheBuffer);
-    vector.setSafe(currentIndex, cacheBuffer, 0, cacheBuffer.limit());
-    currentIndex++;
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroDoubleConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroDoubleConsumer.java
deleted file mode 100644
index 356707a..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroDoubleConsumer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.Float8Vector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes double type values from an avro decoder
- * and writes the data to a {@link Float8Vector}.
- */
-public class AvroDoubleConsumer extends BaseAvroConsumer<Float8Vector> {
-
-  /**
- * Instantiate an AvroDoubleConsumer.
-   */
-  public AvroDoubleConsumer(Float8Vector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readDouble());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroEnumConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroEnumConsumer.java
deleted file mode 100644
index 2f4443b..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroEnumConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.BaseIntVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes enum type values from an avro decoder
- * and writes the dictionary indices to a {@link BaseIntVector}.
- */
-public class AvroEnumConsumer extends BaseAvroConsumer<BaseIntVector> {
-
-  /**
- * Instantiate an AvroEnumConsumer.
-   */
-  public AvroEnumConsumer(BaseIntVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.setWithPossibleTruncate(currentIndex++, decoder.readEnum());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFixedConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFixedConsumer.java
deleted file mode 100644
index a065466..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFixedConsumer.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.FixedSizeBinaryVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes fixed type values from an avro decoder
- * and writes the data to a {@link org.apache.arrow.vector.FixedSizeBinaryVector}.
- */
-public class AvroFixedConsumer extends BaseAvroConsumer<FixedSizeBinaryVector> {
-
-  private final byte[] reuseBytes;
-
-  /**
- * Instantiate an AvroFixedConsumer.
-   */
-  public AvroFixedConsumer(FixedSizeBinaryVector vector, int size) {
-    super(vector);
-    reuseBytes = new byte[size];
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    decoder.readFixed(reuseBytes);
-    vector.setSafe(currentIndex++, reuseBytes);
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFloatConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFloatConsumer.java
deleted file mode 100644
index c8de4a2..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroFloatConsumer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.Float4Vector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes float type values from an avro decoder
- * and writes the data to a {@link Float4Vector}.
- */
-public class AvroFloatConsumer extends BaseAvroConsumer<Float4Vector> {
-
-  /**
- * Instantiate an AvroFloatConsumer.
-   */
-  public AvroFloatConsumer(Float4Vector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readFloat());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroIntConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroIntConsumer.java
deleted file mode 100644
index bc8d4de..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroIntConsumer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.IntVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes int type values from an avro decoder
- * and writes the data to an {@link IntVector}.
- */
-public class AvroIntConsumer extends BaseAvroConsumer<IntVector> {
-
-  /**
- * Instantiate an AvroIntConsumer.
-   */
-  public AvroIntConsumer(IntVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readInt());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroLongConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroLongConsumer.java
deleted file mode 100644
index b9016c5..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroLongConsumer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.BigIntVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes long type values from an avro decoder
- * and writes the data to a {@link BigIntVector}.
- */
-public class AvroLongConsumer extends BaseAvroConsumer<BigIntVector> {
-
-  /**
- * Instantiate an AvroLongConsumer.
-   */
-  public AvroLongConsumer(BigIntVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readLong());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroMapConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroMapConsumer.java
deleted file mode 100644
index b8e8bd5..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroMapConsumer.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.complex.MapVector;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes map type values from an avro decoder
- * and writes the data to a {@link MapVector}.
- */
-public class AvroMapConsumer extends BaseAvroConsumer<MapVector> {
-
-  private final Consumer delegate;
-
-  /**
- * Instantiate an AvroMapConsumer.
-   */
-  public AvroMapConsumer(MapVector vector, Consumer delegate) {
-    super(vector);
-    this.delegate = delegate;
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-
-    vector.startNewValue(currentIndex);
-    long totalCount = 0;
-    for (long count = decoder.readMapStart(); count != 0; count = decoder.mapNext()) {
-      totalCount += count;
-      ensureInnerVectorCapacity(totalCount);
-      for (int element = 0; element < count; element++) {
-        delegate.consume(decoder);
-      }
-    }
-    vector.endValue(currentIndex, (int) totalCount);
-    currentIndex++;
-  }
-
-  @Override
-  public void close() throws Exception {
-    super.close();
-    delegate.close();
-  }
-
-  @Override
-  public boolean resetValueVector(MapVector vector) {
-    this.delegate.resetValueVector(vector.getDataVector());
-    return super.resetValueVector(vector);
-  }
-
-  void ensureInnerVectorCapacity(long targetCapacity) {
-    StructVector innerVector = (StructVector) vector.getDataVector();
-    for (FieldVector v : innerVector.getChildrenFromFields()) {
-      while (v.getValueCapacity() < targetCapacity) {
-        v.reAlloc();
-      }
-    }
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroNullConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroNullConsumer.java
deleted file mode 100644
index 6476800..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroNullConsumer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.NullVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes null type values from an avro decoder.
- * Corresponding to {@link org.apache.arrow.vector.NullVector}.
- */
-public class AvroNullConsumer extends BaseAvroConsumer<NullVector> {
-
-  public AvroNullConsumer(NullVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    currentIndex++;
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStringConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStringConsumer.java
deleted file mode 100644
index 10fe234..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStringConsumer.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import org.apache.arrow.vector.VarCharVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes string type values from an avro decoder
- * and writes the data to a {@link VarCharVector}.
- */
-public class AvroStringConsumer extends BaseAvroConsumer<VarCharVector> {
-
-  private ByteBuffer cacheBuffer;
-
-  /**
- * Instantiate an AvroStringConsumer.
-   */
-  public AvroStringConsumer(VarCharVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    // cacheBuffer starts as null and is created on the first consume;
-    // if its capacity is smaller than the value to read, the decoder allocates a new buffer.
-    cacheBuffer = decoder.readBytes(cacheBuffer);
-    vector.setSafe(currentIndex++, cacheBuffer, 0, cacheBuffer.limit());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStructConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStructConsumer.java
deleted file mode 100644
index 792d01e..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroStructConsumer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.util.AutoCloseables;
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes nested record type values from an avro decoder
- * and writes the data to a {@link org.apache.arrow.vector.complex.StructVector}.
- */
-public class AvroStructConsumer extends BaseAvroConsumer<StructVector> {
-
-  private final Consumer[] delegates;
-
-  /**
- * Instantiate an AvroStructConsumer.
-   */
-  public AvroStructConsumer(StructVector vector, Consumer[] delegates) {
-    super(vector);
-    this.delegates = delegates;
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-
-    ensureInnerVectorCapacity(currentIndex + 1);
-    for (int i = 0; i < delegates.length; i++) {
-      delegates[i].consume(decoder);
-    }
-    vector.setIndexDefined(currentIndex);
-    currentIndex++;
-  }
-
-  @Override
-  public void close() throws Exception {
-    super.close();
-    AutoCloseables.close(delegates);
-  }
-
-  @Override
-  public boolean resetValueVector(StructVector vector) {
-    for (int i = 0; i < delegates.length; i++) {
-      delegates[i].resetValueVector(vector.getChildrenFromFields().get(i));
-    }
-    return super.resetValueVector(vector);
-  }
-
-  void ensureInnerVectorCapacity(long targetCapacity) {
-    for (FieldVector v : vector.getChildrenFromFields()) {
-      while (v.getValueCapacity() < targetCapacity) {
-        v.reAlloc();
-      }
-    }
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroUnionsConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroUnionsConsumer.java
deleted file mode 100644
index c0bb020..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/AvroUnionsConsumer.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.util.AutoCloseables;
-import org.apache.arrow.vector.ValueVector;
-import org.apache.arrow.vector.complex.UnionVector;
-import org.apache.arrow.vector.types.Types;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes union type values from an avro decoder
- * and writes the data to a {@link org.apache.arrow.vector.complex.UnionVector}.
- */
-public class AvroUnionsConsumer extends BaseAvroConsumer<UnionVector> {
-
-  private Consumer[] delegates;
-  private Types.MinorType[] types;
-
-  /**
-   * Instantiate an AvroUnionsConsumer.
-   */
-  public AvroUnionsConsumer(UnionVector vector, Consumer[] delegates, Types.MinorType[] types) {
-
-    super(vector);
-    this.delegates = delegates;
-    this.types = types;
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    int fieldIndex = decoder.readInt();
-
-    ensureInnerVectorCapacity(currentIndex + 1, fieldIndex);
-    Consumer delegate = delegates[fieldIndex];
-
-    vector.setType(currentIndex, types[fieldIndex]);
-    // In UnionVector we need to set the sub-vector writer position before consuming a value,
-    // because in previous iterations we might not have written to this specific union sub-vector.
-    delegate.setPosition(currentIndex);
-    delegate.consume(decoder);
-
-    currentIndex++;
-  }
-
-  @Override
-  public void close() throws Exception {
-    super.close();
-    AutoCloseables.close(delegates);
-  }
-
-  @Override
-  public boolean resetValueVector(UnionVector vector) {
-    for (int i = 0; i < delegates.length; i++) {
-      delegates[i].resetValueVector(vector.getChildrenFromFields().get(i));
-    }
-    return super.resetValueVector(vector);
-  }
-
-  void ensureInnerVectorCapacity(long targetCapacity, int fieldIndex) {
-    ValueVector fieldVector = vector.getChildrenFromFields().get(fieldIndex);
-    if (fieldVector.getMinorType() == Types.MinorType.NULL) {
-      return;
-    }
-    while (fieldVector.getValueCapacity() < targetCapacity) {
-      fieldVector.reAlloc();
-    }
-  }
-}
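In the union consumer deleted above, Avro encodes a union value as a branch index followed by the value itself, which is why consume() first calls decoder.readInt(). The delegates and types arrays line up positionally with the union's branches. A hedged sketch for an Avro union like ["string", "int"]; the vector and per-branch consumers are assumed for illustration:

    Consumer[] delegates = new Consumer[] {stringConsumer, intConsumer};
    Types.MinorType[] types =
        new Types.MinorType[] {Types.MinorType.VARCHAR, Types.MinorType.INT};
    AvroUnionsConsumer unionConsumer = new AvroUnionsConsumer(unionVector, delegates, types);
    unionConsumer.consume(decoder); // reads the branch index, then that branch's value
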
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/BaseAvroConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/BaseAvroConsumer.java
deleted file mode 100644
index 303be8e..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/BaseAvroConsumer.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import org.apache.arrow.vector.FieldVector;
-
-/**
- * Base class for non-skippable Avro consumers.
- * @param <T> vector type.
- */
-public abstract class BaseAvroConsumer<T extends FieldVector> implements Consumer<T> {
-
-  protected T vector;
-  protected int currentIndex;
-
-  /**
-   * Constructs a base Avro consumer.
-   * @param vector the vector to consume.
-   */
-  public BaseAvroConsumer(T vector) {
-    this.vector = vector;
-  }
-
-  @Override
-  public void addNull() {
-    currentIndex++;
-  }
-
-  @Override
-  public void setPosition(int index) {
-    currentIndex = index;
-  }
-
-  @Override
-  public FieldVector getVector() {
-    return vector;
-  }
-
-  @Override
-  public void close() throws Exception {
-    vector.close();
-  }
-
-  @Override
-  public boolean resetValueVector(T vector) {
-    this.vector = vector;
-    this.currentIndex = 0;
-    return true;
-  }
-}
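BaseAvroConsumer above leaves only consume() for subclasses to implement; position tracking, reset, and close are shared. A hedged sketch of the pattern the concrete consumers later in this diff follow, here for a hypothetical IntVector consumer (not taken from the removed files):

    import java.io.IOException;
    import org.apache.arrow.vector.IntVector;
    import org.apache.avro.io.Decoder;

    public class AvroIntConsumer extends BaseAvroConsumer<IntVector> {
      public AvroIntConsumer(IntVector vector) {
        super(vector);
      }

      @Override
      public void consume(Decoder decoder) throws IOException {
        // Write one decoded value at the current row, then advance.
        vector.set(currentIndex++, decoder.readInt());
      }
    }
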
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/CompositeAvroConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/CompositeAvroConsumer.java
deleted file mode 100644
index af476d2..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/CompositeAvroConsumer.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.arrow.util.AutoCloseables;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.avro.io.Decoder;
-
-/**
- * Composite consumer which holds all consumers.
- * It manages the consumption and cleanup process.
- */
-public class CompositeAvroConsumer implements AutoCloseable {
-
-  private final List<Consumer> consumers;
-
-  public List<Consumer> getConsumers() {
-    return consumers;
-  }
-
-  public CompositeAvroConsumer(List<Consumer> consumers) {
-    this.consumers = consumers;
-  }
-
-  /**
-   * Consume data from the decoder.
-   */
-  public void consume(Decoder decoder) throws IOException {
-    for (Consumer consumer : consumers) {
-      consumer.consume(decoder);
-    }
-  }
-
-  /**
-   * Reset the consumers' vectors with the given {@link VectorSchemaRoot}.
-   */
-  public void resetConsumerVectors(VectorSchemaRoot root) {
-    int index = 0;
-    for (Consumer consumer : consumers) {
-      if (consumer.resetValueVector(root.getFieldVectors().get(index))) {
-        index++;
-      }
-    }
-  }
-
-  @Override
-  public void close() {
-    // clean up
-    try {
-      AutoCloseables.close(consumers);
-    } catch (Exception e) {
-      throw new RuntimeException("Error occurred during close.", e);
-    }
-  }
-}
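CompositeAvroConsumer above is the batch driver: one consume() call reads one row across all field consumers. A hedged usage sketch; `consumers` (a List<Consumer>, one per projected field), `root`, `decoder`, and `rowCount` are assumed to be set up elsewhere:

    CompositeAvroConsumer composite = new CompositeAvroConsumer(consumers);
    composite.resetConsumerVectors(root); // point each consumer at the root's vectors
    for (int i = 0; i < rowCount; i++) {
      composite.consume(decoder);         // every consumer reads its field for row i
    }
    root.setRowCount(rowCount);
    composite.close();
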
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/Consumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/Consumer.java
deleted file mode 100644
index 8c4ee9a..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/Consumer.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.FieldVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Interface that is used to consume values from the Avro decoder.
- * @param <T> The vector within the consumer or its delegate, used for partial-consumption purposes.
- */
-public interface Consumer<T extends FieldVector> extends AutoCloseable {
-
-  /**
-   * Consume a value of a specific type from the Avro decoder and write it to the vector.
-   * @param decoder avro decoder to read data
-   * @throws IOException on error
-   */
-  void consume(Decoder decoder) throws IOException;
-
-  /**
-   * Add a null value to the vector by advancing the writer position by 1.
-   */
-  void addNull();
-
-  /**
-   * Set the position to write value into vector.
-   */
-  void setPosition(int index);
-
-  /**
-   * Get the vector within the consumer.
-   */
-  FieldVector getVector();
-
-  /**
-   * Close this consumer when an exception occurs, to avoid potential leaks.
-   */
-  void close() throws Exception;
-
-  /**
-   * Reset the vector within the consumer, for partial-read purposes.
-   * @return true if reset is successful, false if reset is not needed.
-   */
-  boolean resetValueVector(T vector);
-
-  /**
-   * Indicates whether the consumer is of type {@link SkipConsumer}.
-   */
-  default boolean skippable() {
-    return false;
-  }
-
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipConsumer.java
deleted file mode 100644
index 94c5b33..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipConsumer.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.arrow.vector.FieldVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which skips (throws away) data from the decoder.
- */
-public class SkipConsumer implements Consumer {
-
-  private final SkipFunction skipFunction;
-
-  public SkipConsumer(SkipFunction skipFunction) {
-    this.skipFunction = skipFunction;
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    skipFunction.apply(decoder);
-  }
-
-  @Override
-  public void addNull() {
-  }
-
-  @Override
-  public void setPosition(int index) {
-  }
-
-  @Override
-  public FieldVector getVector() {
-    return null;
-  }
-
-  @Override
-  public void close() throws Exception {
-  }
-
-  @Override
-  public boolean resetValueVector(FieldVector vector) {
-    return false;
-  }
-
-  @Override
-  public boolean skippable() {
-    return true;
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipFunction.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipFunction.java
deleted file mode 100644
index 6193891..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/SkipFunction.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers;
-
-import java.io.IOException;
-
-import org.apache.avro.io.Decoder;
-
-/**
- * Adapter function to skip (throw away) data from the decoder.
- */
-@FunctionalInterface
-public interface SkipFunction {
-  void apply(Decoder decoder) throws IOException;
-}
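Because SkipFunction above is a @FunctionalInterface, skip logic for the SkipConsumer can be written as lambdas over the Avro Decoder's skip methods. A hedged sketch:

    Consumer skipString = new SkipConsumer(decoder -> decoder.skipString());
    Consumer skipFixed8 = new SkipConsumer(decoder -> decoder.skipFixed(8));
    // consume() discards the encoded value without writing anything; because
    // resetValueVector() returns false, CompositeAvroConsumer.resetConsumerVectors()
    // does not assign a root vector to skipped fields.
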
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDateConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDateConsumer.java
deleted file mode 100644
index 3aa8970..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDateConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers.logical;
-
-import java.io.IOException;
-
-import org.apache.arrow.consumers.BaseAvroConsumer;
-import org.apache.arrow.vector.DateDayVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes date type values from the Avro decoder.
- * Writes the data to {@link DateDayVector}.
- */
-public class AvroDateConsumer extends BaseAvroConsumer<DateDayVector> {
-
-  /**
-   * Instantiate an AvroDateConsumer.
-   */
-  public AvroDateConsumer(DateDayVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readInt());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDecimalConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDecimalConsumer.java
deleted file mode 100644
index 24d73cf..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroDecimalConsumer.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers.logical;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import org.apache.arrow.consumers.BaseAvroConsumer;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.DecimalVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes decimal type values from the Avro decoder.
- * Writes the data to {@link DecimalVector}.
- */
-public abstract class AvroDecimalConsumer extends BaseAvroConsumer<DecimalVector> {
-
-  /**
-   * Instantiate an AvroDecimalConsumer.
-   */
-  public AvroDecimalConsumer(DecimalVector vector) {
-    super(vector);
-  }
-
-  /**
-   * Consumer for the decimal logical type backed by the Avro bytes type.
-   */
-  public static class BytesDecimalConsumer extends AvroDecimalConsumer {
-
-    private ByteBuffer cacheBuffer;
-
-    /**
-     * Instantiate a BytesDecimalConsumer.
-     */
-    public BytesDecimalConsumer(DecimalVector vector) {
-      super(vector);
-    }
-
-    @Override
-    public void consume(Decoder decoder) throws IOException {
-      cacheBuffer = decoder.readBytes(cacheBuffer);
-      byte[] bytes = new byte[cacheBuffer.limit()];
-      Preconditions.checkArgument(bytes.length <= 16, "Decimal bytes length should be <= 16.");
-      cacheBuffer.get(bytes);
-      vector.setBigEndian(currentIndex++, bytes);
-    }
-
-  }
-
-  /**
-   * Consumer for the decimal logical type backed by the Avro fixed type.
-   */
-  public static class FixedDecimalConsumer extends AvroDecimalConsumer {
-
-    private byte[] reuseBytes;
-
-    /**
-     * Instantiate a FixedDecimalConsumer.
-     */
-    public FixedDecimalConsumer(DecimalVector vector, int size) {
-      super(vector);
-      Preconditions.checkArgument(size <= 16, "Decimal bytes length should be <= 16.");
-      reuseBytes = new byte[size];
-    }
-
-    @Override
-    public void consume(Decoder decoder) throws IOException {
-      decoder.readFixed(reuseBytes);
-      vector.setBigEndian(currentIndex++, reuseBytes);
-    }
-  }
-}
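Both decimal variants above end in DecimalVector.setBigEndian(), hence the shared 16-byte (128-bit) limit checks. A hedged sketch of producing the two Avro encodings these consumers read, using the same org.apache.avro.Conversions helper as the tests later in this diff; `bytesSchema` and `fixedSchema` are assumed decimal-typed Avro schemas:

    Conversions.DecimalConversion conversion = new Conversions.DecimalConversion();
    BigDecimal value = new BigDecimal("12.34");
    // decimal backed by Avro bytes -> handled by BytesDecimalConsumer
    ByteBuffer asBytes = conversion.toBytes(value, bytesSchema, bytesSchema.getLogicalType());
    // decimal backed by Avro fixed -> handled by FixedDecimalConsumer
    GenericFixed asFixed = conversion.toFixed(value, fixedSchema, fixedSchema.getLogicalType());
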
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMicroConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMicroConsumer.java
deleted file mode 100644
index e68ba15..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMicroConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers.logical;
-
-import java.io.IOException;
-
-import org.apache.arrow.consumers.BaseAvroConsumer;
-import org.apache.arrow.vector.TimeMicroVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes time-micros values from the Avro decoder.
- * Writes the data to {@link TimeMicroVector}.
- */
-public class AvroTimeMicroConsumer extends BaseAvroConsumer<TimeMicroVector> {
-
-  /**
-   * Instantiate an AvroTimeMicroConsumer.
-   */
-  public AvroTimeMicroConsumer(TimeMicroVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readLong());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMillisConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMillisConsumer.java
deleted file mode 100644
index f76186f..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimeMillisConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers.logical;
-
-import java.io.IOException;
-
-import org.apache.arrow.consumers.BaseAvroConsumer;
-import org.apache.arrow.vector.TimeMilliVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes time-millis values from the Avro decoder.
- * Writes the data to {@link TimeMilliVector}.
- */
-public class AvroTimeMillisConsumer extends BaseAvroConsumer<TimeMilliVector> {
-
-  /**
-   * Instantiate an AvroTimeMillisConsumer.
-   */
-  public AvroTimeMillisConsumer(TimeMilliVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readInt());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMicrosConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMicrosConsumer.java
deleted file mode 100644
index 82da0e8..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMicrosConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers.logical;
-
-import java.io.IOException;
-
-import org.apache.arrow.consumers.BaseAvroConsumer;
-import org.apache.arrow.vector.TimeStampMicroVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes timestamp-micros values from the Avro decoder.
- * Writes the data to {@link TimeStampMicroVector}.
- */
-public class AvroTimestampMicrosConsumer extends BaseAvroConsumer<TimeStampMicroVector> {
-
-  /**
-   * Instantiate an AvroTimestampMicrosConsumer.
-   */
-  public AvroTimestampMicrosConsumer(TimeStampMicroVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readLong());
-  }
-}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMillisConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMillisConsumer.java
deleted file mode 100644
index 159f49e..0000000
--- a/java/adapter/avro/src/main/java/org/apache/arrow/consumers/logical/AvroTimestampMillisConsumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.consumers.logical;
-
-import java.io.IOException;
-
-import org.apache.arrow.consumers.BaseAvroConsumer;
-import org.apache.arrow.vector.TimeStampMilliVector;
-import org.apache.avro.io.Decoder;
-
-/**
- * Consumer which consumes timestamp-millis values from the Avro decoder.
- * Writes the data to {@link TimeStampMilliVector}.
- */
-public class AvroTimestampMillisConsumer extends BaseAvroConsumer<TimeStampMilliVector> {
-
-  /**
-   * Instantiate an AvroTimestampMillisConsumer.
-   */
-  public AvroTimestampMillisConsumer(TimeStampMilliVector vector) {
-    super(vector);
-  }
-
-  @Override
-  public void consume(Decoder decoder) throws IOException {
-    vector.set(currentIndex++, decoder.readLong());
-  }
-}
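Taken together, the temporal consumers removed above map Avro logical types onto Arrow vectors one-to-one: date (int, days since the epoch) -> DateDayVector, time-millis (int) -> TimeMilliVector, time-micros (long) -> TimeMicroVector, timestamp-millis (long) -> TimeStampMilliVector, and timestamp-micros (long) -> TimeStampMicroVector. Each consume() is a single decoder.readInt()/readLong() followed by a vector set at the current row.
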
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/AvroLogicalTypesTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/AvroLogicalTypesTest.java
deleted file mode 100644
index 050a50d..0000000
--- a/java/adapter/avro/src/test/java/org/apache/arrow/AvroLogicalTypesTest.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static junit.framework.TestCase.assertNull;
-import static junit.framework.TestCase.assertTrue;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-import java.time.LocalDateTime;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.util.DateUtility;
-import org.apache.avro.Conversions;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericFixed;
-import org.junit.Test;
-
-public class AvroLogicalTypesTest extends AvroTestBase {
-
-  @Test
-  public void testTimestampMicros() throws Exception {
-    Schema schema = getSchema("logical/test_timestamp_micros.avsc");
-
-    List<Long> data = Arrays.asList(10000L, 20000L, 30000L, 40000L, 50000L);
-    List<LocalDateTime> expected = Arrays.asList(
-        DateUtility.getLocalDateTimeFromEpochMicro(10000),
-        DateUtility.getLocalDateTimeFromEpochMicro(20000),
-        DateUtility.getLocalDateTimeFromEpochMicro(30000),
-        DateUtility.getLocalDateTimeFromEpochMicro(40000),
-        DateUtility.getLocalDateTimeFromEpochMicro(50000)
-    );
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expected, vector);
-  }
-
-  @Test
-  public void testTimestampMillis() throws Exception {
-    Schema schema = getSchema("logical/test_timestamp_millis.avsc");
-
-    List<Long> data = Arrays.asList(10000L, 20000L, 30000L, 40000L, 50000L);
-    List<LocalDateTime> expected = Arrays.asList(
-        DateUtility.getLocalDateTimeFromEpochMilli(10000),
-        DateUtility.getLocalDateTimeFromEpochMilli(20000),
-        DateUtility.getLocalDateTimeFromEpochMilli(30000),
-        DateUtility.getLocalDateTimeFromEpochMilli(40000),
-        DateUtility.getLocalDateTimeFromEpochMilli(50000)
-    );
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expected, vector);
-  }
-
-  @Test
-  public void testTimeMicros() throws Exception {
-    Schema schema = getSchema("logical/test_time_micros.avsc");
-
-    List<Long> data = Arrays.asList(10000L, 20000L, 30000L, 40000L, 50000L);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testTimeMillis() throws Exception {
-    Schema schema = getSchema("logical/test_time_millis.avsc");
-
-    List<Integer> data = Arrays.asList(100, 200, 300, 400, 500);
-    List<LocalDateTime> expected = Arrays.asList(
-        DateUtility.getLocalDateTimeFromEpochMilli(100),
-        DateUtility.getLocalDateTimeFromEpochMilli(200),
-        DateUtility.getLocalDateTimeFromEpochMilli(300),
-        DateUtility.getLocalDateTimeFromEpochMilli(400),
-        DateUtility.getLocalDateTimeFromEpochMilli(500)
-    );
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expected, vector);
-  }
-
-  @Test
-  public void testDate() throws Exception {
-    Schema schema = getSchema("logical/test_date.avsc");
-
-    List<Integer> data = Arrays.asList(100, 200, 300, 400, 500);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testDecimalWithOriginalBytes() throws Exception {
-    Schema schema = getSchema("logical/test_decimal_with_original_bytes.avsc");
-    List<ByteBuffer> data = new ArrayList<>();
-    List<BigDecimal> expected = new ArrayList<>();
-
-    Conversions.DecimalConversion conversion = new Conversions.DecimalConversion();
-
-    for (int i = 0; i < 5; i++) {
-      BigDecimal value = new BigDecimal(i * i).setScale(2);
-      ByteBuffer buffer = conversion.toBytes(value, schema, schema.getLogicalType());
-      data.add(buffer);
-      expected.add(value);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-    checkPrimitiveResult(expected, vector);
-
-  }
-
-  @Test
-  public void testDecimalWithOriginalFixed() throws Exception {
-    Schema schema = getSchema("logical/test_decimal_with_original_fixed.avsc");
-
-    List<GenericFixed> data = new ArrayList<>();
-    List<BigDecimal> expected = new ArrayList<>();
-
-    Conversions.DecimalConversion conversion = new Conversions.DecimalConversion();
-
-    for (int i = 0; i < 5; i++) {
-      BigDecimal value = new BigDecimal(i * i).setScale(2);
-      GenericFixed fixed = conversion.toFixed(value, schema, schema.getLogicalType());
-      data.add(fixed);
-      expected.add(value);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-    checkPrimitiveResult(expected, vector);
-  }
-
-  @Test
-  public void testInvalidDecimalPrecision() throws Exception {
-    Schema schema = getSchema("logical/test_decimal_invalid1.avsc");
-    List<ByteBuffer> data = new ArrayList<>();
-
-    Conversions.DecimalConversion conversion = new Conversions.DecimalConversion();
-
-    for (int i = 0; i < 5; i++) {
-      BigDecimal value = new BigDecimal(i * i).setScale(2);
-      ByteBuffer buffer = conversion.toBytes(value, schema, schema.getLogicalType());
-      data.add(buffer);
-    }
-
-    IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
-        () -> writeAndRead(schema, data));
-    assertTrue(e.getMessage().contains("Precision must be in range of 1 to 38"));
-
-  }
-
-  @Test
-  public void testFailedToCreateDecimalLogicalType() throws Exception {
-    // For the decimal logical type, if Avro fails to validate the schema, it will not create the logical type,
-    // and the schema will be treated as its underlying primitive type.
-
-    // java.lang.IllegalArgumentException: Invalid decimal scale: -1 (must be positive)
-    Schema schema1 = getSchema("logical/test_decimal_invalid2.avsc");
-    assertNull(schema1.getLogicalType());
-
-    // java.lang.IllegalArgumentException: Invalid decimal scale: 40 (greater than precision: 20)
-    Schema schema2 = getSchema("logical/test_decimal_invalid3.avsc");
-    assertNull(schema2.getLogicalType());
-
-    // java.lang.IllegalArgumentException: fixed(1) cannot store 30 digits (max 2)
-    Schema schema3 = getSchema("logical/test_decimal_invalid4.avsc");
-    assertNull(schema3.getLogicalType());
-  }
-
-}
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/AvroSkipFieldTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/AvroSkipFieldTest.java
deleted file mode 100644
index b946dbd..0000000
--- a/java/adapter/avro/src/test/java/org/apache/arrow/AvroSkipFieldTest.java
+++ /dev/null
@@ -1,626 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static org.junit.Assert.assertEquals;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.arrow.vector.types.Types;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericRecord;
-import org.junit.Test;
-
-public class AvroSkipFieldTest extends AvroTestBase {
-
-  @Test
-  public void testSkipUnionWithOneField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f0");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_union_before.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_union_one_field_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, i % 2 == 0 ? "test" + i : null);
-      record.put(2, i % 2 == 0 ? "test" + i : i);
-      record.put(3, i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(1));
-      expectedRecord.put(1, record.get(2));
-      expectedRecord.put(2, record.get(3));
-      expectedData.add(expectedRecord);
-    }
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipUnionWithNullableOneField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f1");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_union_before.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_union_nullable_field_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, i % 2 == 0 ? "test" + i : null);
-      record.put(2, i % 2 == 0 ? "test" + i : i);
-      record.put(3, i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(2));
-      expectedRecord.put(2, record.get(3));
-      expectedData.add(expectedRecord);
-    }
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipUnionWithMultiFields() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f2");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_union_before.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_union_multi_fields_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, i % 2 == 0 ? "test" + i : null);
-      record.put(2, i % 2 == 0 ? "test" + i : i);
-      record.put(3, i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(1));
-      expectedRecord.put(2, record.get(3));
-      expectedData.add(expectedRecord);
-    }
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipMapField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f1");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_map_before.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_map_expected.avsc");
-
-    HashMap<String, String> map = new HashMap<>();
-    map.put("key1", "value1");
-    map.put("key2", "value3");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, map);
-      record.put(2, i % 2 == 0);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(2));
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipArrayField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f1");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_array_before.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_array_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, Arrays.asList("test" + i, "test" + i));
-      record.put(2, i % 2 == 0);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(2));
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipMultiFields() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f1");
-    skipFieldNames.add("f2");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("test_record.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_multi_fields_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, i);
-      record.put(2, i % 2 == 0);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipStringField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f2");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base1.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_string_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      final byte[] testBytes = ("test" + i).getBytes();
-      GenericRecord record = new GenericData.Record(schema);
-      GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
-      fixed.bytes(testBytes);
-      record.put(0, fixed);
-      GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
-      record.put(1, symbol);
-      record.put(2, "testtest" + i);
-      record.put(3, ByteBuffer.wrap(testBytes));
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, testBytes);
-      expectedRecord.put(1, (byte) i % 2);
-      expectedRecord.put(2, testBytes);
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipBytesField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f3");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base1.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_bytes_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      final byte[] testBytes = ("test" + i).getBytes();
-      GenericRecord record = new GenericData.Record(schema);
-      GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
-      fixed.bytes(testBytes);
-      record.put(0, fixed);
-      GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
-      record.put(1, symbol);
-      record.put(2, "testtest" + i);
-      record.put(3, ByteBuffer.wrap(testBytes));
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, testBytes);
-      expectedRecord.put(1, (byte) i % 2);
-      expectedRecord.put(2, record.get(2));
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipFixedField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f0");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base1.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_fixed_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      final byte[] testBytes = ("test" + i).getBytes();
-      GenericRecord record = new GenericData.Record(schema);
-      GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
-      fixed.bytes(testBytes);
-      record.put(0, fixed);
-      GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
-      record.put(1, symbol);
-      record.put(2, "testtest" + i);
-      record.put(3, ByteBuffer.wrap(testBytes));
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, (byte) i % 2);
-      expectedRecord.put(1, record.get(2));
-      expectedRecord.put(2, record.get(3));
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipEnumField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f1");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base1.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_fixed_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      final byte[] testBytes = ("test" + i).getBytes();
-      GenericRecord record = new GenericData.Record(schema);
-      GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
-      fixed.bytes(testBytes);
-      record.put(0, fixed);
-      GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
-      record.put(1, symbol);
-      record.put(2, "testtest" + i);
-      record.put(3, ByteBuffer.wrap(testBytes));
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, testBytes);
-      expectedRecord.put(1, record.get(2));
-      expectedRecord.put(2, record.get(3));
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipBooleanField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f0");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base2.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_boolean_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0);
-      record.put(1, i);
-      record.put(2, (long) i);
-      record.put(3, (float) i);
-      record.put(4, (double) i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(1));
-      expectedRecord.put(1, record.get(2));
-      expectedRecord.put(2, record.get(3));
-      expectedRecord.put(3, record.get(4));
-
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipIntField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f1");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base2.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_int_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0);
-      record.put(1, i);
-      record.put(2, (long) i);
-      record.put(3, (float) i);
-      record.put(4, (double) i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(2));
-      expectedRecord.put(2, record.get(3));
-      expectedRecord.put(3, record.get(4));
-
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipLongField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f2");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base2.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_long_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0);
-      record.put(1, i);
-      record.put(2, (long) i);
-      record.put(3, (float) i);
-      record.put(4, (double) i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(1));
-      expectedRecord.put(2, record.get(3));
-      expectedRecord.put(3, record.get(4));
-
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipFloatField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f3");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base2.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_float_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0);
-      record.put(1, i);
-      record.put(2, (long) i);
-      record.put(3, (float) i);
-      record.put(4, (double) i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(1));
-      expectedRecord.put(2, record.get(2));
-      expectedRecord.put(3, record.get(4));
-
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipDoubleField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f4");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_base2.avsc");
-    Schema expectedSchema = getSchema("skip/test_skip_double_expected.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0);
-      record.put(1, i);
-      record.put(2, (long) i);
-      record.put(3, (float) i);
-      record.put(4, (double) i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, record.get(0));
-      expectedRecord.put(1, record.get(1));
-      expectedRecord.put(2, record.get(2));
-      expectedRecord.put(3, record.get(3));
-
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipRecordField() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f0");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("skip/test_skip_record_before.avsc");
-    Schema nestedSchema = schema.getFields().get(0).schema();
-    ArrayList<GenericRecord> data = new ArrayList<>();
-
-    Schema expectedSchema = getSchema("skip/test_skip_record_expected.avsc");
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      GenericRecord nestedRecord = new GenericData.Record(nestedSchema);
-      nestedRecord.put(0, "test" + i);
-      nestedRecord.put(1, i);
-      record.put(0, nestedRecord);
-      record.put(1, i);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      expectedRecord.put(0, i);
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipNestedFields() throws Exception {
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f0.f0");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    Schema schema = getSchema("test_nested_record.avsc");
-    Schema nestedSchema = schema.getFields().get(0).schema();
-    ArrayList<GenericRecord> data = new ArrayList<>();
-
-    Schema expectedSchema = getSchema("skip/test_skip_second_level_expected.avsc");
-    Schema expectedNestedSchema = expectedSchema.getFields().get(0).schema();
-    ArrayList<GenericRecord> expectedData = new ArrayList<>();
-
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      GenericRecord nestedRecord = new GenericData.Record(nestedSchema);
-      nestedRecord.put(0, "test" + i);
-      nestedRecord.put(1, i);
-      record.put(0, nestedRecord);
-      data.add(record);
-
-      GenericRecord expectedRecord = new GenericData.Record(expectedSchema);
-      GenericRecord expectedNestedRecord = new GenericData.Record(expectedNestedSchema);
-      expectedNestedRecord.put(0, nestedRecord.get(1));
-      expectedRecord.put(0, expectedNestedRecord);
-      expectedData.add(expectedRecord);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkNestedRecordResult(expectedSchema, expectedData, root);
-  }
-
-  @Test
-  public void testSkipThirdLevelField() throws Exception {
-    Schema firstLevelSchema = getSchema("skip/test_skip_third_level_expected.avsc");
-    Schema secondLevelSchema = firstLevelSchema.getFields().get(0).schema();
-    Schema thirdLevelSchema = secondLevelSchema.getFields().get(0).schema();
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord firstLevelRecord = new GenericData.Record(firstLevelSchema);
-      GenericRecord secondLevelRecord = new GenericData.Record(secondLevelSchema);
-      GenericRecord thirdLevelRecord = new GenericData.Record(thirdLevelSchema);
-
-      thirdLevelRecord.put(0, i);
-      thirdLevelRecord.put(1, "test" + i);
-      thirdLevelRecord.put(2, i % 2 == 0);
-
-      secondLevelRecord.put(0, thirdLevelRecord);
-      firstLevelRecord.put(0, secondLevelRecord);
-      data.add(firstLevelRecord);
-    }
-
-    // first, convert without skipping any fields
-    VectorSchemaRoot root1 = writeAndRead(firstLevelSchema, data);
-
-    assertEquals(1, root1.getFieldVectors().size());
-    assertEquals(Types.MinorType.STRUCT, root1.getFieldVectors().get(0).getMinorType());
-    StructVector secondLevelVector = (StructVector) root1.getFieldVectors().get(0);
-    assertEquals(1, secondLevelVector.getChildrenFromFields().size());
-    assertEquals(Types.MinorType.STRUCT, secondLevelVector.getChildrenFromFields().get(0).getMinorType());
-    StructVector thirdLevelVector = (StructVector) secondLevelVector.getChildrenFromFields().get(0);
-    assertEquals(3, thirdLevelVector.getChildrenFromFields().size());
-
-    // skip third level field and validate
-    Set<String> skipFieldNames = new HashSet<>();
-    skipFieldNames.add("f0.f0.f0");
-    config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
-    VectorSchemaRoot root2 = writeAndRead(firstLevelSchema, data);
-
-    assertEquals(1, root2.getFieldVectors().size());
-    assertEquals(Types.MinorType.STRUCT, root2.getFieldVectors().get(0).getMinorType());
-    StructVector secondStruct = (StructVector) root2.getFieldVectors().get(0);
-    assertEquals(1, secondStruct.getChildrenFromFields().size());
-    assertEquals(Types.MinorType.STRUCT, secondStruct.getChildrenFromFields().get(0).getMinorType());
-    StructVector thirdStruct = (StructVector) secondStruct.getChildrenFromFields().get(0);
-    assertEquals(2, thirdStruct.getChildrenFromFields().size());
-
-    assertEquals(Types.MinorType.INT, thirdStruct.getChildrenFromFields().get(0).getMinorType());
-    assertEquals(Types.MinorType.BIT, thirdStruct.getChildrenFromFields().get(1).getMinorType());
-  }
-}
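
The skip-field tests above exercised the adapter's ability to prune fields by
dotted path via AvroToArrowConfigBuilder.setSkipFieldNames. A minimal sketch of
that usage, assuming the adapter classes shown in this diff plus a Schema and a
positioned BinaryDecoder already in scope:

    import java.util.Collections;
    import org.apache.arrow.memory.RootAllocator;
    import org.apache.arrow.vector.VectorSchemaRoot;

    // Drop the second-level field "f0.f0" during conversion.
    AvroToArrowConfig config =
        new AvroToArrowConfigBuilder(new RootAllocator(Long.MAX_VALUE))
            .setSkipFieldNames(Collections.singleton("f0.f0"))
            .build();
    VectorSchemaRoot root = AvroToArrow.avroToArrow(schema, decoder, config);
    // root now holds every column except the skipped one.
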
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/AvroTestBase.java b/java/adapter/avro/src/test/java/org/apache/arrow/AvroTestBase.java
deleted file mode 100644
index f24f0f1..0000000
--- a/java/adapter/avro/src/test/java/org/apache/arrow/AvroTestBase.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.nio.ByteBuffer;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.memory.RootAllocator;
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.arrow.vector.util.JsonStringArrayList;
-import org.apache.arrow.vector.util.Text;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.io.BinaryDecoder;
-import org.apache.avro.io.BinaryEncoder;
-import org.apache.avro.io.DatumWriter;
-import org.apache.avro.io.DecoderFactory;
-import org.apache.avro.io.EncoderFactory;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.rules.TemporaryFolder;
-
-public class AvroTestBase {
-
-  @ClassRule
-  public static final TemporaryFolder TMP = new TemporaryFolder();
-
-  protected AvroToArrowConfig config;
-
-  @Before
-  public void init() {
-    BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
-    config = new AvroToArrowConfigBuilder(allocator).build();
-  }
-
-  protected Schema getSchema(String schemaName) throws Exception {
-    Path schemaPath = Paths.get(TestWriteReadAvroRecord.class.getResource("/").getPath(),
-        "schema", schemaName);
-    return new Schema.Parser().parse(schemaPath.toFile());
-  }
-
-  protected VectorSchemaRoot writeAndRead(Schema schema, List data) throws Exception {
-    File dataFile = TMP.newFile();
-
-    BinaryEncoder
-        encoder = new EncoderFactory().directBinaryEncoder(new FileOutputStream(dataFile), null);
-    DatumWriter writer = new GenericDatumWriter(schema);
-    BinaryDecoder
-        decoder = new DecoderFactory().directBinaryDecoder(new FileInputStream(dataFile), null);
-
-    for (Object value : data) {
-      writer.write(value, encoder);
-    }
-
-    return AvroToArrow.avroToArrow(schema, decoder, config);
-  }
-
-  protected void checkArrayResult(List<List<?>> expected, ListVector vector) {
-    assertEquals(expected.size(), vector.getValueCount());
-    for (int i = 0; i < expected.size(); i++) {
-      checkArrayElement(expected.get(i), (JsonStringArrayList) vector.getObject(i));
-    }
-  }
-
-  protected void checkArrayElement(List expected, List actual) {
-    assertEquals(expected.size(), actual.size());
-    for (int i = 0; i < expected.size(); i++) {
-      Object value1 = expected.get(i);
-      Object value2 = actual.get(i);
-      if (value1 == null) {
-        assertNull(value2);
-        continue;
-      }
-      if (value2 instanceof byte[]) {
-        value2 = ByteBuffer.wrap((byte[]) value2);
-      } else if (value2 instanceof Text) {
-        value2 = value2.toString();
-      }
-      assertEquals(value1, value2);
-    }
-  }
-
-  protected void checkPrimitiveResult(List data, FieldVector vector) {
-    assertEquals(data.size(), vector.getValueCount());
-    for (int i = 0; i < data.size(); i++) {
-      Object value1 = data.get(i);
-      Object value2 = vector.getObject(i);
-      if (value1 == null) {
-        assertNull(value2);
-        continue;
-      }
-      if (value2 instanceof byte[]) {
-        value2 = ByteBuffer.wrap((byte[]) value2);
-        if (value1 instanceof byte[]) {
-          value1 = ByteBuffer.wrap((byte[]) value1);
-        }
-      } else if (value2 instanceof Text) {
-        value2 = value2.toString();
-      } else if (value2 instanceof Byte) {
-        value2 = ((Byte) value2).intValue();
-      }
-      assertEquals(value1, value2);
-    }
-  }
-
-  protected void checkRecordResult(Schema schema, ArrayList<GenericRecord> data, VectorSchemaRoot root) {
-    assertEquals(data.size(), root.getRowCount());
-    assertEquals(schema.getFields().size(), root.getFieldVectors().size());
-
-    for (int i = 0; i < schema.getFields().size(); i++) {
-      ArrayList fieldData = new ArrayList();
-      for (GenericRecord record : data) {
-        fieldData.add(record.get(i));
-      }
-
-      checkPrimitiveResult(fieldData, root.getFieldVectors().get(i));
-    }
-
-  }
-
-  protected void checkNestedRecordResult(Schema schema, List<GenericRecord> data, VectorSchemaRoot root) {
-    assertEquals(data.size(), root.getRowCount());
-    assertTrue(schema.getFields().size() == 1);
-
-    final Schema nestedSchema = schema.getFields().get(0).schema();
-    final StructVector structVector = (StructVector) root.getFieldVectors().get(0);
-
-    for (int i = 0; i < nestedSchema.getFields().size(); i++) {
-      ArrayList fieldData = new ArrayList();
-      for (GenericRecord record : data) {
-        GenericRecord nestedRecord = (GenericRecord) record.get(0);
-        fieldData.add(nestedRecord.get(i));
-      }
-
-      checkPrimitiveResult(fieldData, structVector.getChildrenFromFields().get(i));
-    }
-
-  }
-
-
-  // the methods below support the iterator API
-
-  protected void checkArrayResult(List<List<?>> expected, List<ListVector> vectors) {
-    int valueCount = vectors.stream().mapToInt(v -> v.getValueCount()).sum();
-    assertEquals(expected.size(), valueCount);
-
-    int index = 0;
-    for (ListVector vector : vectors) {
-      for (int i = 0; i < vector.getValueCount(); i++) {
-        checkArrayElement(expected.get(index++), (JsonStringArrayList) vector.getObject(i));
-      }
-    }
-  }
-
-  protected void checkRecordResult(Schema schema, ArrayList<GenericRecord> data, List<VectorSchemaRoot> roots) {
-    roots.forEach(root -> {
-      assertEquals(schema.getFields().size(), root.getFieldVectors().size());
-    });
-
-    for (int i = 0; i < schema.getFields().size(); i++) {
-      List fieldData = new ArrayList();
-      List<FieldVector> vectors = new ArrayList<>();
-      for (GenericRecord record : data) {
-        fieldData.add(record.get(i));
-      }
-      final int columnIndex = i;
-      roots.forEach(root -> vectors.add(root.getFieldVectors().get(columnIndex)));
-
-      checkPrimitiveResult(fieldData, vectors);
-    }
-
-  }
-
-  protected void checkPrimitiveResult(List data, List<FieldVector> vectors) {
-    int valueCount = vectors.stream().mapToInt(v -> v.getValueCount()).sum();
-    assertEquals(data.size(), valueCount);
-
-    int index = 0;
-    for (FieldVector vector : vectors) {
-      for (int i = 0; i < vector.getValueCount(); i++) {
-        Object value1 = data.get(index++);
-        Object value2 = vector.getObject(i);
-        if (value1 == null) {
-          assertNull(value2);
-          continue;
-        }
-        if (value2 instanceof byte[]) {
-          value2 = ByteBuffer.wrap((byte[]) value2);
-          if (value1 instanceof byte[]) {
-            value1 = ByteBuffer.wrap((byte[]) value1);
-          }
-        } else if (value2 instanceof Text) {
-          value2 = value2.toString();
-        }
-        assertEquals(value1, value2);
-      }
-    }
-  }
-}
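
AvroTestBase.writeAndRead above round-trips records through Avro's raw binary
encoding (directBinaryEncoder / directBinaryDecoder) before handing the decoder
to the adapter. For reference, the same round trip using only Avro's public
API, self-contained and in memory (the inline schema is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;

    public class AvroBinaryRoundTrip {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":"
                + "[{\"name\":\"f0\",\"type\":\"string\"}]}");

        // Encode one record into an in-memory buffer.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
        GenericRecord record = new GenericData.Record(schema);
        record.put(0, "hello");
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush();

        // Decode it back from the bytes just written.
        BinaryDecoder decoder = DecoderFactory.get()
            .directBinaryDecoder(new ByteArrayInputStream(out.toByteArray()), null);
        GenericRecord readBack =
            new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
        System.out.println(readBack); // {"f0": "hello"}
      }
    }
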
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowIteratorTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowIteratorTest.java
deleted file mode 100644
index 2b05a19..0000000
--- a/java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowIteratorTest.java
+++ /dev/null
@@ -1,313 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.memory.RootAllocator;
-import org.apache.arrow.util.AutoCloseables;
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.io.BinaryDecoder;
-import org.apache.avro.io.BinaryEncoder;
-import org.apache.avro.io.DatumWriter;
-import org.apache.avro.io.Decoder;
-import org.apache.avro.io.DecoderFactory;
-import org.apache.avro.io.EncoderFactory;
-import org.apache.avro.util.Utf8;
-import org.junit.Test;
-
-public class AvroToArrowIteratorTest extends AvroTestBase {
-
-  @Override
-  public void init() {
-    final BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
-    this.config = new AvroToArrowConfigBuilder(allocator).setTargetBatchSize(3).build();
-  }
-
-  private AvroToArrowVectorIterator convert(Schema schema, List data) throws Exception {
-    File dataFile = TMP.newFile();
-
-    BinaryEncoder
-        encoder = new EncoderFactory().directBinaryEncoder(new FileOutputStream(dataFile), null);
-    DatumWriter writer = new GenericDatumWriter(schema);
-    BinaryDecoder
-        decoder = new DecoderFactory().directBinaryDecoder(new FileInputStream(dataFile), null);
-
-    for (Object value : data) {
-      writer.write(value, encoder);
-    }
-
-    return AvroToArrow.avroToArrowIterator(schema, decoder, config);
-  }
-
-  @Test
-  public void testStringType() throws Exception {
-    Schema schema = getSchema("test_primitive_string.avsc");
-    List<String> data = Arrays.asList("v1", "v2", "v3", "v4", "v5");
-
-    List<VectorSchemaRoot> roots = new ArrayList<>();
-    List<FieldVector> vectors = new ArrayList<>();
-    try (AvroToArrowVectorIterator iterator = convert(schema, data)) {
-      while (iterator.hasNext()) {
-        VectorSchemaRoot root = iterator.next();
-        FieldVector vector = root.getFieldVectors().get(0);
-        roots.add(root);
-        vectors.add(vector);
-      }
-    }
-    checkPrimitiveResult(data, vectors);
-    AutoCloseables.close(roots);
-  }
-
-  @Test
-  public void testNullableStringType() throws Exception {
-    Schema schema = getSchema("test_nullable_string.avsc");
-
-    List<GenericRecord> data = new ArrayList<>();
-    List<String> expected = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      String value = i % 2 == 0 ? "test" + i : null;
-      record.put(0, value);
-      expected.add(value);
-      data.add(record);
-    }
-
-    List<VectorSchemaRoot> roots = new ArrayList<>();
-    List<FieldVector> vectors = new ArrayList<>();
-    try (AvroToArrowVectorIterator iterator = convert(schema, data)) {
-      while (iterator.hasNext()) {
-        VectorSchemaRoot root = iterator.next();
-        FieldVector vector = root.getFieldVectors().get(0);
-        roots.add(root);
-        vectors.add(vector);
-      }
-    }
-    checkPrimitiveResult(expected, vectors);
-    AutoCloseables.close(roots);
-
-  }
-
-  @Test
-  public void testRecordType() throws Exception {
-    Schema schema = getSchema("test_record.avsc");
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, i);
-      record.put(2, i % 2 == 0);
-      data.add(record);
-    }
-
-    List<VectorSchemaRoot> roots = new ArrayList<>();
-    try (AvroToArrowVectorIterator iterator = convert(schema, data)) {
-      while (iterator.hasNext()) {
-        roots.add(iterator.next());
-      }
-    }
-    checkRecordResult(schema, data, roots);
-    AutoCloseables.close(roots);
-
-  }
-
-  @Test
-  public void testArrayType() throws Exception {
-    Schema schema = getSchema("test_array.avsc");
-    List<List<?>> data = Arrays.asList(
-        Arrays.asList("11", "222", "999"),
-        Arrays.asList("12222", "2333", "1000"),
-        Arrays.asList("1rrr", "2ggg"),
-        Arrays.asList("1vvv", "2bbb"),
-        Arrays.asList("1fff", "2"));
-
-    List<VectorSchemaRoot> roots = new ArrayList<>();
-    List<ListVector> vectors = new ArrayList<>();
-    try (AvroToArrowVectorIterator iterator = convert(schema, data)) {
-      while (iterator.hasNext()) {
-        VectorSchemaRoot root = iterator.next();
-        roots.add(root);
-        vectors.add((ListVector) root.getFieldVectors().get(0));
-      }
-    }
-    checkArrayResult(data, vectors);
-    AutoCloseables.close(roots);
-  }
-
-  @Test
-  public void runLargeNumberOfRows() throws Exception {
-    Schema schema = getSchema("test_large_data.avsc");
-    int x = 0;
-    final int targetRows = 600000;
-    Decoder fakeDecoder = new FakeDecoder(targetRows);
-    try (AvroToArrowVectorIterator iter = AvroToArrow.avroToArrowIterator(schema, fakeDecoder,
-            new AvroToArrowConfigBuilder(config.getAllocator()).build())) {
-      while (iter.hasNext()) {
-        VectorSchemaRoot root = iter.next();
-        x += root.getRowCount();
-        root.close();
-      }
-    }
-
-    assertEquals(targetRows, x);
-  }
-
-  /**
-   * Fake Avro decoder used to exercise conversion of a large number of rows.
-   */
-  private class FakeDecoder extends Decoder {
-
-    private int numRows;
-
-    FakeDecoder(int numRows) {
-      this.numRows = numRows;
-    }
-
-    // Decoder has no hasNext() API, so we assume the enum field is the first column in
-    // the schema, the fixed field is the last, and each appears exactly once.
-    private void validate() throws EOFException {
-      if (numRows <= 0) {
-        throw new EOFException();
-      }
-    }
-
-    @Override
-    public void readNull() throws IOException {
-    }
-
-    @Override
-    public boolean readBoolean() throws IOException {
-      return false;
-    }
-
-    @Override
-    public int readInt() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public long readLong() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public float readFloat() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public double readDouble() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public Utf8 readString(Utf8 old) throws IOException {
-      return new Utf8("test123test123" + numRows);
-    }
-
-    @Override
-    public String readString() throws IOException {
-      return "test123test123" + numRows;
-    }
-
-    @Override
-    public void skipString() throws IOException {
-
-    }
-
-    @Override
-    public ByteBuffer readBytes(ByteBuffer old) throws IOException {
-      return ByteBuffer.allocate(0);
-    }
-
-    @Override
-    public void skipBytes() throws IOException {
-
-    }
-
-    @Override
-    public void readFixed(byte[] bytes, int start, int length) throws IOException {
-      // the fixed field is the last column; after reading its value, decrement numRows
-      numRows--;
-    }
-
-    @Override
-    public void skipFixed(int length) throws IOException {
-
-    }
-
-    @Override
-    public int readEnum() throws IOException {
-      // the enum field is the first column, so validate numRows before reading.
-      validate();
-      return 0;
-    }
-
-    @Override
-    public long readArrayStart() throws IOException {
-      return 5;
-    }
-
-    @Override
-    public long arrayNext() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public long skipArray() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public long readMapStart() throws IOException {
-      return 5;
-    }
-
-    @Override
-    public long mapNext() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public long skipMap() throws IOException {
-      return 0;
-    }
-
-    @Override
-    public int readIndex() throws IOException {
-      return 0;
-    }
-  }
-}
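
The iterator tests above consume the adapter's batched API: avroToArrowIterator
emits one VectorSchemaRoot per targetBatchSize rows instead of materializing
everything at once. A minimal consumption loop, assuming the adapter classes
from this diff plus a schema, decoder, and allocator in scope:

    AvroToArrowConfig config = new AvroToArrowConfigBuilder(allocator)
        .setTargetBatchSize(1024)  // rows per emitted VectorSchemaRoot
        .build();
    try (AvroToArrowVectorIterator batches =
             AvroToArrow.avroToArrowIterator(schema, decoder, config)) {
      while (batches.hasNext()) {
        try (VectorSchemaRoot batch = batches.next()) {
          // Each batch owns its buffers; close it to release memory.
          System.out.println("rows: " + batch.getRowCount());
        }
      }
    }
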
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowTest.java
deleted file mode 100644
index c007e1a..0000000
--- a/java/adapter/avro/src/test/java/org/apache/arrow/AvroToArrowTest.java
+++ /dev/null
@@ -1,477 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static org.junit.Assert.assertEquals;
-
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.VarCharVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.arrow.vector.complex.MapVector;
-import org.apache.arrow.vector.complex.StructVector;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericRecord;
-import org.junit.Test;
-
-public class AvroToArrowTest extends AvroTestBase {
-
-  @Test
-  public void testStringType() throws Exception {
-    Schema schema = getSchema("test_primitive_string.avsc");
-    List<String> data = Arrays.asList("v1", "v2", "v3", "v4", "v5");
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableStringType() throws Exception {
-    Schema schema = getSchema("test_nullable_string.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? "test" + i : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testRecordType() throws Exception {
-    Schema schema = getSchema("test_record.avsc");
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, "test" + i);
-      record.put(1, i);
-      record.put(2, i % 2 == 0);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testFixedAttributes() throws Exception {
-    Schema schema = getSchema("attrs/test_fixed_attr.avsc");
-
-    List<GenericData.Fixed> data = new ArrayList<>();
-    List<byte[]> expected = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      byte[] value = ("value" + i).getBytes(StandardCharsets.UTF_8);
-      expected.add(value);
-      GenericData.Fixed fixed = new GenericData.Fixed(schema);
-      fixed.bytes(value);
-      data.add(fixed);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    Map<String, String> metadata = vector.getField().getMetadata();
-    assertEquals("fixed doc", metadata.get("doc"));
-    assertEquals("[\"alias1\",\"alias2\"]", metadata.get("aliases"));
-  }
-
-  @Test
-  public void testEnumAttributes() throws Exception {
-    Schema schema = getSchema("attrs/test_enum_attrs.avsc");
-    List<GenericData.EnumSymbol> data = Arrays.asList(
-        new GenericData.EnumSymbol(schema, "SPADES"),
-        new GenericData.EnumSymbol(schema, "HEARTS"),
-        new GenericData.EnumSymbol(schema, "DIAMONDS"),
-        new GenericData.EnumSymbol(schema, "CLUBS"),
-        new GenericData.EnumSymbol(schema, "SPADES"));
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    Map<String, String> metadata = vector.getField().getMetadata();
-    assertEquals("enum doc", metadata.get("doc"));
-    assertEquals("[\"alias1\",\"alias2\"]", metadata.get("aliases"));
-  }
-
-  @Test
-  public void testRecordAttributes() throws Exception {
-    Schema schema = getSchema("attrs/test_record_attrs.avsc");
-    Schema nestedSchema = schema.getFields().get(0).schema();
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      GenericRecord nestedRecord = new GenericData.Record(nestedSchema);
-      nestedRecord.put(0, "test" + i);
-      nestedRecord.put(1, i);
-      record.put(0, nestedRecord);
-
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-
-    StructVector structVector = (StructVector) root.getFieldVectors().get(0);
-    Map<String, String> structMeta = structVector.getField().getMetadata();
-    Map<String, String> childMeta1 = structVector.getChildByOrdinal(0).getField().getMetadata();
-    Map<String, String> childMeta2 = structVector.getChildByOrdinal(1).getField().getMetadata();
-
-    assertEquals("f0 doc", structMeta.get("doc"));
-    assertEquals("[\"f0.a1\"]", structMeta.get("aliases"));
-    assertEquals("f1 doc", childMeta1.get("doc"));
-    assertEquals("[\"f1.a1\",\"f1.a2\"]", childMeta1.get("aliases"));
-    assertEquals("f2 doc", childMeta2.get("doc"));
-    assertEquals("[\"f2.a1\",\"f2.a2\"]", childMeta2.get("aliases"));
-  }
-
-  @Test
-  public void testNestedRecordType() throws Exception {
-    Schema schema = getSchema("test_nested_record.avsc");
-    Schema nestedSchema = schema.getFields().get(0).schema();
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      GenericRecord nestedRecord = new GenericData.Record(nestedSchema);
-      nestedRecord.put(0, "test" + i);
-      nestedRecord.put(1, i);
-      record.put(0, nestedRecord);
-
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkNestedRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testEnumType() throws Exception {
-    Schema schema = getSchema("test_primitive_enum.avsc");
-    List<GenericData.EnumSymbol> data = Arrays.asList(
-        new GenericData.EnumSymbol(schema, "SPADES"),
-        new GenericData.EnumSymbol(schema, "HEARTS"),
-        new GenericData.EnumSymbol(schema, "DIAMONDS"),
-        new GenericData.EnumSymbol(schema, "CLUBS"),
-        new GenericData.EnumSymbol(schema, "SPADES"));
-
-    List<Integer> expectedIndices = Arrays.asList(0, 1, 2, 3, 0);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expectedIndices, vector);
-
-    VarCharVector dictVector = (VarCharVector) config.getProvider().lookup(0).getVector();
-    assertEquals(4, dictVector.getValueCount());
-
-    assertEquals("SPADES", dictVector.getObject(0).toString());
-    assertEquals("HEARTS", dictVector.getObject(1).toString());
-    assertEquals("DIAMONDS", dictVector.getObject(2).toString());
-    assertEquals("CLUBS", dictVector.getObject(3).toString());
-  }
-
-  @Test
-  public void testIntType() throws Exception {
-    Schema schema = getSchema("test_primitive_int.avsc");
-    List<Integer> data = Arrays.asList(1, 2, 3, 4, 5);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableIntType() throws Exception {
-    Schema schema = getSchema("test_nullable_int.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? i : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testLongType() throws Exception {
-    Schema schema = getSchema("test_primitive_long.avsc");
-    List<Long> data = Arrays.asList(1L, 2L, 3L, 4L, 5L);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableLongType() throws Exception {
-    Schema schema = getSchema("test_nullable_long.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? (long) i : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testFloatType() throws Exception {
-    Schema schema = getSchema("test_primitive_float.avsc");
-    List<Float> data = Arrays.asList(1.1f, 2.2f, 3.3f, 4.4f, 5.5f);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableFloatType() throws Exception {
-    Schema schema = getSchema("test_nullable_float.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? i + 0.1f : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testDoubleType() throws Exception {
-    Schema schema = getSchema("test_primitive_double.avsc");
-    List<Double> data = Arrays.asList(1.1, 2.2, 3.3, 4.4, 5.5);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableDoubleType() throws Exception {
-    Schema schema = getSchema("test_nullable_double.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? i + 0.1 : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testBytesType() throws Exception {
-    Schema schema = getSchema("test_primitive_bytes.avsc");
-    List<ByteBuffer> data = Arrays.asList(
-        ByteBuffer.wrap("value1".getBytes(StandardCharsets.UTF_8)),
-        ByteBuffer.wrap("value2".getBytes(StandardCharsets.UTF_8)),
-        ByteBuffer.wrap("value3".getBytes(StandardCharsets.UTF_8)),
-        ByteBuffer.wrap("value4".getBytes(StandardCharsets.UTF_8)),
-        ByteBuffer.wrap("value5".getBytes(StandardCharsets.UTF_8)));
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableBytesType() throws Exception {
-    Schema schema = getSchema("test_nullable_bytes.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? ByteBuffer.wrap(("test" + i).getBytes(StandardCharsets.UTF_8)) : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testBooleanType() throws Exception {
-    Schema schema = getSchema("test_primitive_boolean.avsc");
-    List<Boolean> data = Arrays.asList(true, false, true, false, true);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(data, vector);
-  }
-
-  @Test
-  public void testNullableBooleanType() throws Exception {
-    Schema schema = getSchema("test_nullable_boolean.avsc");
-
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? true : null);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    checkRecordResult(schema, data, root);
-  }
-
-  @Test
-  public void testArrayType() throws Exception {
-    Schema schema = getSchema("test_array.avsc");
-    List<List<?>> data = Arrays.asList(
-        Arrays.asList("11", "222", "999"),
-        Arrays.asList("12222", "2333", "1000"),
-        Arrays.asList("1rrr", "2ggg"),
-        Arrays.asList("1vvv", "2bbb"),
-        Arrays.asList("1fff", "2"));
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkArrayResult(data, (ListVector) vector);
-  }
-
-  @Test
-  public void testMapType() throws Exception {
-    Schema schema = getSchema("test_map.avsc");
-
-    List keys = Arrays.asList("key1", "key2", "key3", "key4", "key5", "key6");
-    List vals = Arrays.asList("val1", "val2", "val3", "val4", "val5", "val6");
-
-    List<LinkedHashMap> data = new ArrayList<>();
-    LinkedHashMap map1 = new LinkedHashMap();
-    map1.put(keys.get(0), vals.get(0));
-    map1.put(keys.get(1), vals.get(1));
-    data.add(map1);
-
-    LinkedHashMap map2 = new LinkedHashMap();
-    map2.put(keys.get(2), vals.get(2));
-    map2.put(keys.get(3), vals.get(3));
-    data.add(map2);
-
-    LinkedHashMap map3 = new LinkedHashMap();
-    map3.put(keys.get(4), vals.get(4));
-    map3.put(keys.get(5), vals.get(5));
-    data.add(map3);
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    MapVector vector = (MapVector) root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(keys, vector.getDataVector().getChildrenFromFields().get(0));
-    checkPrimitiveResult(vals, vector.getDataVector().getChildrenFromFields().get(1));
-    assertEquals(0, vector.getOffsetBuffer().getInt(0));
-    assertEquals(2, vector.getOffsetBuffer().getInt(1 * 4));
-    assertEquals(4, vector.getOffsetBuffer().getInt(2 * 4));
-    assertEquals(6, vector.getOffsetBuffer().getInt(3 * 4));
-  }
-
-  @Test
-  public void testFixedType() throws Exception {
-    Schema schema = getSchema("test_fixed.avsc");
-
-    List<GenericData.Fixed> data = new ArrayList<>();
-    List<byte[]> expected = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      byte[] value = ("value" + i).getBytes(StandardCharsets.UTF_8);
-      expected.add(value);
-      GenericData.Fixed fixed = new GenericData.Fixed(schema);
-      fixed.bytes(value);
-      data.add(fixed);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expected, vector);
-  }
-
-  @Test
-  public void testUnionType() throws Exception {
-    Schema schema = getSchema("test_union.avsc");
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<Object> expected = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put(0, i % 2 == 0 ? "test" + i : i);
-      expected.add(i % 2 == 0 ? "test" + i : i);
-      data.add(record);
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expected, vector);
-  }
-
-  @Test
-  public void testNullableUnionType() throws Exception {
-    Schema schema = getSchema("test_nullable_union.avsc");
-    ArrayList<GenericRecord> data = new ArrayList<>();
-    ArrayList<Object> expected = new ArrayList<>();
-    for (int i = 0; i < 5; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      if (i % 3 == 0) {
-        record.put(0, "test" + i);
-        expected.add("test" + i);
-        data.add(record);
-      } else if (i % 3 == 1) {
-        record.put(0, i);
-        expected.add(i);
-        data.add(record);
-      } else {
-        record.put(0, null);
-        expected.add(null);
-        data.add(record);
-      }
-    }
-
-    VectorSchemaRoot root = writeAndRead(schema, data);
-    FieldVector vector = root.getFieldVectors().get(0);
-
-    checkPrimitiveResult(expected, vector);
-  }
-
-}
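
testEnumType above is worth a note: the adapter dictionary-encodes Avro enums,
so the returned column holds integer indices while the symbols live in a
dictionary registered with the config's provider. A sketch of mapping a row
back to its symbol, under the same assumptions (and imports) as that test:

    // Index stored for row 0 in the dictionary-encoded enum column.
    int index = (Integer) root.getFieldVectors().get(0).getObject(0);
    // Resolve it against dictionary id 0, as the test does.
    VarCharVector dictionary =
        (VarCharVector) config.getProvider().lookup(0).getVector();
    String symbol = dictionary.getObject(index).toString();  // e.g. "SPADES"
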
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/TestWriteReadAvroRecord.java b/java/adapter/avro/src/test/java/org/apache/arrow/TestWriteReadAvroRecord.java
deleted file mode 100644
index bf695d1..0000000
--- a/java/adapter/avro/src/test/java/org/apache/arrow/TestWriteReadAvroRecord.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.avro.Schema;
-import org.apache.avro.file.DataFileReader;
-import org.apache.avro.file.DataFileWriter;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.io.DatumReader;
-import org.apache.avro.io.DatumWriter;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-
-public class TestWriteReadAvroRecord {
-
-  @ClassRule
-  public static final TemporaryFolder TMP = new TemporaryFolder();
-
-  @Test
-  public void testWriteAndRead() throws Exception {
-
-    File dataFile = TMP.newFile();
-    Path schemaPath = Paths.get(TestWriteReadAvroRecord.class.getResource("/").getPath(), "schema", "test.avsc");
-    Schema schema = new Schema.Parser().parse(schemaPath.toFile());
-
-    // write data to disk
-    GenericRecord user1 = new GenericData.Record(schema);
-    user1.put("name", "Alyssa");
-    user1.put("favorite_number", 256);
-
-    GenericRecord user2 = new GenericData.Record(schema);
-    user2.put("name", "Ben");
-    user2.put("favorite_number", 7);
-    user2.put("favorite_color", "red");
-
-    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
-    DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(datumWriter);
-    dataFileWriter.create(schema, dataFile);
-    dataFileWriter.append(user1);
-    dataFileWriter.append(user2);
-    dataFileWriter.close();
-
-    // read data from disk
-    DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
-    DataFileReader<GenericRecord>
-        dataFileReader = new DataFileReader<GenericRecord>(dataFile, datumReader);
-    List<GenericRecord> result = new ArrayList<>();
-    while (dataFileReader.hasNext()) {
-      GenericRecord user = dataFileReader.next();
-      result.add(user);
-    }
-
-    assertEquals(2, result.size());
-    GenericRecord deUser1 = result.get(0);
-    assertEquals("Alyssa", deUser1.get("name").toString());
-    assertEquals(256, deUser1.get("favorite_number"));
-    assertEquals(null, deUser1.get("favorite_color"));
-
-    GenericRecord deUser2 = result.get(1);
-    assertEquals("Ben", deUser2.get("name").toString());
-    assertEquals(7, deUser2.get("favorite_number"));
-    assertEquals("red", deUser2.get("favorite_color").toString());
-  }
-
-}
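
One detail the test above relies on implicitly: DataFileWriter.create(schema,
dataFile) embeds the schema in the container file's header, so a reader does
not strictly need the .avsc on disk. A sketch using the same imports as the
deleted test:

    // No reader schema supplied; it is taken from the file header.
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
    try (DataFileReader<GenericRecord> reader =
             new DataFileReader<>(dataFile, datumReader)) {
      Schema embedded = reader.getSchema();  // schema written by DataFileWriter
      while (reader.hasNext()) {
        GenericRecord user = reader.next();
      }
    }
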
diff --git a/java/adapter/avro/src/test/resources/schema/attrs/test_enum_attrs.avsc b/java/adapter/avro/src/test/resources/schema/attrs/test_enum_attrs.avsc
deleted file mode 100644
index afd00b8..0000000
--- a/java/adapter/avro/src/test/resources/schema/attrs/test_enum_attrs.avsc
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "type": "enum",
- "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"],
- "name": "testEnum",
- "doc" : "enum doc",
- "aliases" : ["alias1", "alias2"]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/attrs/test_fixed_attr.avsc b/java/adapter/avro/src/test/resources/schema/attrs/test_fixed_attr.avsc
deleted file mode 100644
index 55e504d..0000000
--- a/java/adapter/avro/src/test/resources/schema/attrs/test_fixed_attr.avsc
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "type": "fixed",
- "size": 6,
- "name": "testFixed",
- "doc" : "fixed doc",
- "aliases" : ["alias1", "alias2"]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/attrs/test_record_attrs.avsc b/java/adapter/avro/src/test/resources/schema/attrs/test_record_attrs.avsc
deleted file mode 100644
index 2e2e311..0000000
--- a/java/adapter/avro/src/test/resources/schema/attrs/test_record_attrs.avsc
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testAttrs",
- "fields": [
-  {
-    "name" : "f0",
-    "type" : {
-        "type" : "record",
-        "name" : "nestedInRecord",
-        "doc" : "f0 doc",
-        "aliases" : ["f0.a1"],
-        "fields": [
-             {"name": "f1", "type": "string", "doc": "f1 doc", "aliases" : ["f1.a1", "f1.a2"]},
-             {"name": "f2", "type": "int", "doc": "f2 doc", "aliases" : ["f2.a1", "f2.a2"]}
-        ]
-    }
-  }
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_date.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_date.avsc
deleted file mode 100644
index f661e65..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_date.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "int",
- "logicalType" : "date"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid1.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid1.avsc
deleted file mode 100644
index 18d7d63..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid1.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "bytes",
- "logicalType" : "decimal",
- "precision": 39,
- "scale": 2
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid2.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid2.avsc
deleted file mode 100644
index eed7bd7..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid2.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "bytes",
- "logicalType" : "decimal",
- "precision": 20,
- "scale": -1
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid3.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid3.avsc
deleted file mode 100644
index 1667b8a..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid3.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "bytes",
- "logicalType" : "decimal",
- "precision": 20,
- "scale": 40
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid4.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid4.avsc
deleted file mode 100644
index e1f7104..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_invalid4.avsc
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "fixed",
- "size" : 1,
- "logicalType" : "decimal",
- "precision": 30,
- "scale": 2
-}
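
The four *_invalid*.avsc schemas above each violate a decimal constraint, which
is presumably what the adapter's validation tests checked: precision 39 exceeds
the 38-digit maximum of Arrow's 128-bit decimal; a negative scale and a scale
larger than the precision are both disallowed by the Avro specification; and a
fixed of size 1 can hold at most 2 decimal digits, far short of precision 30.
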
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_bytes.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_bytes.avsc
deleted file mode 100644
index 944b5d8..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_bytes.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "bytes",
- "logicalType" : "decimal",
- "precision": 10,
- "scale": 2
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_fixed.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_fixed.avsc
deleted file mode 100644
index 1901f90..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_decimal_with_original_fixed.avsc
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "fixed",
- "size" : 10,
- "logicalType" : "decimal",
- "precision": 10,
- "scale": 2
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_time_micros.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_time_micros.avsc
deleted file mode 100644
index ee7d4e9..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_time_micros.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "long",
- "logicalType" : "time-micros"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_time_millis.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_time_millis.avsc
deleted file mode 100644
index 54877ba..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_time_millis.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "int",
- "logicalType" : "time-millis"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_timestamp_micros.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_timestamp_micros.avsc
deleted file mode 100644
index 15c0bf5..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_timestamp_micros.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "long",
- "logicalType" : "timestamp-micros"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/logical/test_timestamp_millis.avsc b/java/adapter/avro/src/test/resources/schema/logical/test_timestamp_millis.avsc
deleted file mode 100644
index 822a2c3..0000000
--- a/java/adapter/avro/src/test/resources/schema/logical/test_timestamp_millis.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "name": "test",
- "type": "long",
- "logicalType" : "timestamp-millis"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_array_before.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_array_before.avsc
deleted file mode 100644
index e836aa7..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_array_before.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": "string"},
-     {"name": "f1", "type": {"type" : "array", "items": "string"}},
-     {"name": "f2", "type": "boolean"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_array_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_array_expected.avsc
deleted file mode 100644
index 36e7fdf..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_array_expected.avsc
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": "string"},
-     {"name": "f2", "type": "boolean"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_base1.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_base1.avsc
deleted file mode 100644
index 5338253..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_base1.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}},
-     {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}},
-     {"name": "f2", "type": "string"},
-     {"name": "f3", "type": "bytes"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_base2.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_base2.avsc
deleted file mode 100644
index 50655a7..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_base2.avsc
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": "boolean"},
-     {"name": "f1", "type": "int"},
-     {"name": "f2", "type": "long"},
-     {"name": "f3", "type": "float"},
-     {"name": "f4", "type": "double"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_boolean_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_boolean_expected.avsc
deleted file mode 100644
index 9b62e31..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_boolean_expected.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f1", "type": "int"},
-     {"name": "f2", "type": "long"},
-     {"name": "f3", "type": "float"},
-     {"name": "f4", "type": "double"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_bytes_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_bytes_expected.avsc
deleted file mode 100644
index 8a1903b..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_bytes_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}},
-     {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}},
-     {"name": "f2", "type": "string"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_double_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_double_expected.avsc
deleted file mode 100644
index 6021c44..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_double_expected.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": "boolean"},
-     {"name": "f1", "type": "int"},
-     {"name": "f2", "type": "long"},
-     {"name": "f3", "type": "float"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_enum_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_enum_expected.avsc
deleted file mode 100644
index f5ed86a..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_enum_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}},
-     {"name": "f2", "type": "string"},
-     {"name": "f3", "type": "bytes"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_fixed_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_fixed_expected.avsc
deleted file mode 100644
index 5423a79..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_fixed_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}},
-     {"name": "f2", "type": "string"},
-     {"name": "f3", "type": "bytes"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_float_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_float_expected.avsc
deleted file mode 100644
index dea1063..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_float_expected.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": "boolean"},
-     {"name": "f1", "type": "int"},
-     {"name": "f2", "type": "long"},
-     {"name": "f4", "type": "double"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_int_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_int_expected.avsc
deleted file mode 100644
index 53d4f10..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_int_expected.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": "boolean"},
-     {"name": "f2", "type": "long"},
-     {"name": "f3", "type": "float"},
-     {"name": "f4", "type": "double"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_long_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_long_expected.avsc
deleted file mode 100644
index bf16601..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_long_expected.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": "boolean"},
-     {"name": "f1", "type": "int"},
-     {"name": "f3", "type": "float"},
-     {"name": "f4", "type": "double"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_map_before.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_map_before.avsc
deleted file mode 100644
index 8cbb1a1..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_map_before.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": "string"},
-     {"name": "f1", "type": {"type" : "map", "values": "string"}},
-     {"name": "f2", "type": "boolean"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_map_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_map_expected.avsc
deleted file mode 100644
index 36e7fdf..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_map_expected.avsc
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": "string"},
-     {"name": "f2", "type": "boolean"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_multi_fields_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_multi_fields_expected.avsc
deleted file mode 100644
index b5d637b..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_multi_fields_expected.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testSkip",
- "fields": [
-     {"name": "f0", "type": "string"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_record_before.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_record_before.avsc
deleted file mode 100644
index 7aee92b..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_record_before.avsc
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-  {
-    "name" : "f0",
-    "type" : {
-        "type" : "record",
-        "name" : "nestedInRecord",
-        "fields": [
-             {"name": "f00", "type": "string"},
-             {"name": "f01", "type": "int"}
-        ]
-    }
-  },
-  {
-    "name" : "f1", "type" : "int"
-  }
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_record_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_record_expected.avsc
deleted file mode 100644
index 3e24952..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_record_expected.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-    { "name" : "f1", "type" : "int"}
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_second_level_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_second_level_expected.avsc
deleted file mode 100644
index f3b7f8c..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_second_level_expected.avsc
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testSkipNested",
- "fields": [
-  {
-    "name" : "nested",
-    "type" : {
-        "type" : "record",
-        "name" : "nestedInRecord",
-        "fields": [
-             {"name": "f1", "type": "int"}
-        ]
-    }
-  }
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_single_field_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_single_field_expected.avsc
deleted file mode 100644
index 5535258..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_single_field_expected.avsc
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testSkip",
- "fields": [
-     {"name": "f0", "type": "string"},
-     {"name": "f2", "type": "boolean"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_string_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_string_expected.avsc
deleted file mode 100644
index 2d2c081..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_string_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}},
-     {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}},
-     {"name": "f3", "type": "bytes"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_third_level_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_third_level_expected.avsc
deleted file mode 100644
index 6f42da8..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_third_level_expected.avsc
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "firstLevel",
- "fields": [
-  {
-    "name" : "f0",
-    "type" : {
-        "type" : "record",
-        "name" : "secondLevel",
-        "fields": [
-        {
-             "name" : "f0",
-             "type" : {
-                "type" : "record",
-                "name" : "thirdLevel",
-                "fields" : [
-                    {"name": "f1", "type": "int"},
-                    {"name": "f0", "type": "string"},
-                    {"name": "f2", "type": "boolean"}
-                ]
-             }
-        }
-        ]
-    }
-  }
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_before.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_before.avsc
deleted file mode 100644
index fc11059..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_before.avsc
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": ["string"]},
-     {"name": "f1", "type": ["string", "null"]},
-     {"name": "f2", "type": ["string", "int"]},
-     {"name": "f3", "type": "int"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_multi_fields_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_multi_fields_expected.avsc
deleted file mode 100644
index 308e027..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_multi_fields_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": ["string"]},
-     {"name": "f1", "type": ["string", "null"]},
-     {"name": "f3", "type": "int"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_nullable_field_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_nullable_field_expected.avsc
deleted file mode 100644
index cbc83e5..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_nullable_field_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f0", "type": ["string"]},
-     {"name": "f2", "type": ["string", "int"]},
-     {"name": "f3", "type": "int"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_one_field_expected.avsc b/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_one_field_expected.avsc
deleted file mode 100644
index 0f72fb4..0000000
--- a/java/adapter/avro/src/test/resources/schema/skip/test_skip_union_one_field_expected.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "test",
- "fields": [
-     {"name": "f1", "type": ["string", "null"]},
-     {"name": "f2", "type": ["string", "int"]},
-     {"name": "f3", "type": ["string", "int"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test.avsc b/java/adapter/avro/src/test/resources/schema/test.avsc
deleted file mode 100644
index 92c0873..0000000
--- a/java/adapter/avro/src/test/resources/schema/test.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "User",
- "fields": [
-     {"name": "name", "type": "string"},
-     {"name": "favorite_number",  "type": ["int", "null"]},
-     {"name": "favorite_color", "type": ["string", "null"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_array.avsc b/java/adapter/avro/src/test/resources/schema/test_array.avsc
deleted file mode 100644
index 5b75a40..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_array.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "array",
- "items": "string",
- "name": "testArray"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_fixed.avsc b/java/adapter/avro/src/test/resources/schema/test_fixed.avsc
deleted file mode 100644
index a4d96e9..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_fixed.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "fixed",
- "size": 6,
- "name": "testFixed"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_large_data.avsc b/java/adapter/avro/src/test/resources/schema/test_large_data.avsc
deleted file mode 100644
index f784ae6..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_large_data.avsc
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testLargeData",
- "fields": [
-    {
-        "name": "f0",
-        "type": {
-          "name" : "f0",
-          "type" : "enum",
-          "symbols" : ["value1", "value2", "value3", "value4", "value5"]
-        }
-    },
-    {
-        "name" : "f1",
-        "type" : {
-            "type" : "record",
-            "name" : "nestedRecord",
-            "fields": [
-                 {"name": "f1_0", "type": "string"},
-                 {"name": "f1_1", "type": "int"}
-            ]
-        }
-    },
-
-    {"name": "f2", "type": "string"},
-    {"name": "f3", "type": "int"},
-    {"name": "f4", "type": "boolean"},
-    {"name": "f5", "type": "float"},
-    {"name": "f6", "type": "double"},
-    {"name": "f7", "type": "bytes"},
-    {"name": "f8", "type": ["string", "int"]},
-    {
-        "name": "f9",
-        "type": {
-            "name" : "f9",
-            "type" : "array",
-            "items" : "string"
-        }
-    },
-    {
-        "name": "f10",
-        "type": {
-            "name" : "f10",
-            "type" : "map",
-            "values" : "string"
-        }
-    },
-    {
-      "name": "f11",
-      "type": {
-          "type" : "fixed",
-          "name" : "f11",
-          "size" : 5
-      }
-    }
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_map.avsc b/java/adapter/avro/src/test/resources/schema/test_map.avsc
deleted file mode 100644
index 0dfa3a5..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_map.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "map",
- "values": "string",
- "name": "testMap"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nested_record.avsc b/java/adapter/avro/src/test/resources/schema/test_nested_record.avsc
deleted file mode 100644
index 29dddfd..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nested_record.avsc
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testNestedRecord",
- "fields": [
-  {
-    "name" : "f0",
-    "type" : {
-        "type" : "record",
-        "name" : "nestedInRecord",
-        "fields": [
-             {"name": "f0", "type": "string"},
-             {"name": "f1", "type": "int"}
-        ]
-    }
-  }
-  ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_boolean.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_boolean.avsc
deleted file mode 100644
index 62af1a8..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_boolean.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableBoolean",
- "fields": [
-     {"name": "f0", "type": ["null", "boolean"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_bytes.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_bytes.avsc
deleted file mode 100644
index 002bc7c..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_bytes.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableBytes",
- "fields": [
-     {"name": "f0", "type": ["null", "bytes"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_double.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_double.avsc
deleted file mode 100644
index 642b7aa..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_double.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableDouble",
- "fields": [
-     {"name": "f0", "type": ["null", "double"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_float.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_float.avsc
deleted file mode 100644
index dff2859..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_float.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableFloat",
- "fields": [
-     {"name": "f0", "type": ["null", "float"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_int.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_int.avsc
deleted file mode 100644
index abb2fc4..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_int.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableInt",
- "fields": [
-     {"name": "f0", "type": ["null", "int"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_long.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_long.avsc
deleted file mode 100644
index 0624d27..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_long.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableLong",
- "fields": [
-     {"name": "f0", "type": ["null", "long"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_string.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_string.avsc
deleted file mode 100644
index 347808c..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_string.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "nullableString",
- "fields": [
-     {"name": "f0", "type": ["null", "string"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_nullable_union.avsc b/java/adapter/avro/src/test/resources/schema/test_nullable_union.avsc
deleted file mode 100644
index af94812..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_nullable_union.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testNullableUnions",
- "fields": [
-     {"name": "f0", "type": ["string", "int", "null"]}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_boolean.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_boolean.avsc
deleted file mode 100644
index 7652ce7..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_boolean.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "boolean",
- "name": "TestBoolean"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_bytes.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_bytes.avsc
deleted file mode 100644
index 5102430..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_bytes.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "bytes",
- "name": "TestBytes"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_double.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_double.avsc
deleted file mode 100644
index d1ae0b6..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_double.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "double",
- "name": "TestDouble"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_enum.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_enum.avsc
deleted file mode 100644
index bd8df61..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_enum.avsc
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "enum",
- "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"],
- "name": "testEnum"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_float.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_float.avsc
deleted file mode 100644
index 675d109..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_float.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "float",
- "name": "TestFloat"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_int.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_int.avsc
deleted file mode 100644
index 8fc8488..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_int.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "int",
- "name": "TestInt"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_long.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_long.avsc
deleted file mode 100644
index b970610..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_long.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "long",
- "name": "TestLong"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_primitive_string.avsc b/java/adapter/avro/src/test/resources/schema/test_primitive_string.avsc
deleted file mode 100644
index b4a89a7..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_primitive_string.avsc
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "string",
- "name": "TestString"
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_record.avsc b/java/adapter/avro/src/test/resources/schema/test_record.avsc
deleted file mode 100644
index e83cf11..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_record.avsc
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testRecord",
- "fields": [
-     {"name": "f0", "type": "string"},
-     {"name": "f1", "type": "int"},
-     {"name": "f2", "type": "boolean"}
- ]
-}
diff --git a/java/adapter/avro/src/test/resources/schema/test_union.avsc b/java/adapter/avro/src/test/resources/schema/test_union.avsc
deleted file mode 100644
index f181e36..0000000
--- a/java/adapter/avro/src/test/resources/schema/test_union.avsc
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-{
- "namespace": "org.apache.arrow.avro",
- "type": "record",
- "name": "testUnions",
- "fields": [
-     {"name": "f0", "type": ["string", "int"]}
- ]
-}
diff --git a/java/adapter/jdbc/pom.xml b/java/adapter/jdbc/pom.xml
deleted file mode 100644
index b75135f..0000000
--- a/java/adapter/jdbc/pom.xml
+++ /dev/null
@@ -1,109 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
-    license agreements. See the NOTICE file distributed with this work for additional
-    information regarding copyright ownership. The ASF licenses this file to
-    You under the Apache License, Version 2.0 (the "License"); you may not use
-    this file except in compliance with the License. You may obtain a copy of
-    the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
-    by applicable law or agreed to in writing, software distributed under the
-    License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-    OF ANY KIND, either express or implied. See the License for the specific
-    language governing permissions and limitations under the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>org.apache.arrow</groupId>
-        <artifactId>arrow-java-root</artifactId>
-        <version>4.0.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-
-    <artifactId>arrow-jdbc</artifactId>
-    <name>Arrow JDBC Adapter</name>
-    <description>(Contrib/Experimental) A library for converting JDBC data to Arrow data.</description>
-    <url>http://maven.apache.org</url>
-
-    <dependencies>
-
-        <!-- https://mvnrepository.com/artifact/org.apache.arrow/arrow-memory-core -->
-        <dependency>
-            <groupId>org.apache.arrow</groupId>
-            <artifactId>arrow-memory-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <!-- https://mvnrepository.com/artifact/org.apache.arrow/arrow-memory-netty -->
-        <dependency>
-            <groupId>org.apache.arrow</groupId>
-            <artifactId>arrow-memory-netty</artifactId>
-            <version>${project.version}</version>
-            <scope>runtime</scope>
-        </dependency>
-
-        <!-- https://mvnrepository.com/artifact/org.apache.arrow/arrow-vector -->
-        <dependency>
-            <groupId>org.apache.arrow</groupId>
-            <artifactId>arrow-vector</artifactId>
-            <version>${project.version}</version>
-            <classifier>${arrow.vector.classifier}</classifier>
-        </dependency>
-
-        <!-- https://mvnrepository.com/artifact/com.h2database/h2 -->
-        <dependency>
-            <groupId>com.h2database</groupId>
-            <artifactId>h2</artifactId>
-            <version>1.4.196</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.dataformat</groupId>
-            <artifactId>jackson-dataformat-yaml</artifactId>
-            <version>${dep.jackson.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>${dep.jackson.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <version>${dep.jackson.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <version>${dep.jackson.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-common</artifactId>
-        </dependency>
-
-    </dependencies>
-
-    <build>	
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <systemPropertyVariables>
-                        <user.timezone>UTC</user.timezone>
-                    </systemPropertyVariables>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java
deleted file mode 100644
index b8796cd..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-import static org.apache.arrow.adapter.jdbc.JdbcToArrowUtils.isColumnNullable;
-
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.Iterator;
-
-import org.apache.arrow.adapter.jdbc.consumer.CompositeJdbcConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.JdbcConsumer;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.types.pojo.ArrowType;
-import org.apache.arrow.vector.types.pojo.Schema;
-import org.apache.arrow.vector.util.ValueVectorUtility;
-
-/**
- * VectorSchemaRoot iterator for partially converting JDBC data.
- */
-public class ArrowVectorIterator implements Iterator<VectorSchemaRoot>, AutoCloseable {
-
-  private final ResultSet resultSet;
-  private final JdbcToArrowConfig config;
-
-  private final Schema schema;
-  private final ResultSetMetaData rsmd;
-
-  private final JdbcConsumer[] consumers;
-  final CompositeJdbcConsumer compositeConsumer;
-
-  private VectorSchemaRoot nextBatch;
-
-  private final int targetBatchSize;
-
-  /**
-   * Construct an instance.
-   */
-  private ArrowVectorIterator(ResultSet resultSet, JdbcToArrowConfig config) throws SQLException {
-    this.resultSet = resultSet;
-    this.config = config;
-    this.schema = JdbcToArrowUtils.jdbcToArrowSchema(resultSet.getMetaData(), config);
-    this.targetBatchSize = config.getTargetBatchSize();
-
-    rsmd = resultSet.getMetaData();
-    consumers = new JdbcConsumer[rsmd.getColumnCount()];
-    this.compositeConsumer = new CompositeJdbcConsumer(consumers);
-  }
-
-  private void initialize() throws SQLException {
-    // create consumers
-    for (int i = 1; i <= consumers.length; i++) {
-      ArrowType arrowType = config.getJdbcToArrowTypeConverter()
-          .apply(new JdbcFieldInfo(resultSet.getMetaData(), i));
-      consumers[i - 1] = JdbcToArrowUtils.getConsumer(
-          arrowType, i, isColumnNullable(resultSet, i), null, config);
-    }
-
-    load(createVectorSchemaRoot());
-  }
-
-  /**
-   * Create an ArrowVectorIterator to partially convert data.
-   */
-  public static ArrowVectorIterator create(
-      ResultSet resultSet,
-      JdbcToArrowConfig config)
-      throws SQLException {
-
-    ArrowVectorIterator iterator = new ArrowVectorIterator(resultSet, config);
-    try {
-      iterator.initialize();
-      return iterator;
-    } catch (Exception e) {
-      iterator.close();
-      throw new RuntimeException("Error occurred while creating iterator.", e);
-    }
-  }
-
-  private void consumeData(VectorSchemaRoot root) {
-    // consume data
-    try {
-      int readRowCount = 0;
-      if (targetBatchSize == JdbcToArrowConfig.NO_LIMIT_BATCH_SIZE) {
-        while (resultSet.next()) {
-          ValueVectorUtility.ensureCapacity(root, readRowCount + 1);
-          compositeConsumer.consume(resultSet);
-          readRowCount++;
-        }
-      } else {
-        while (readRowCount < targetBatchSize && resultSet.next()) {
-          compositeConsumer.consume(resultSet);
-          readRowCount++;
-        }
-      }
-
-
-    } catch (Exception e) {
-      compositeConsumer.close();
-      throw new RuntimeException("Error occurred while consuming data.", e);
-    }
-  }
-
-  private VectorSchemaRoot createVectorSchemaRoot() {
-    VectorSchemaRoot root = null;
-    try {
-      root = VectorSchemaRoot.create(schema, config.getAllocator());
-      if (config.getTargetBatchSize() != JdbcToArrowConfig.NO_LIMIT_BATCH_SIZE) {
-        ValueVectorUtility.preAllocate(root, config.getTargetBatchSize());
-      }
-    } catch (Exception e) {
-      if (root != null) {
-        root.close();
-      }
-      throw new RuntimeException("Error occurred while creating schema root.", e);
-    }
-    return root;
-  }
-
-  // Loads the next batch into nextBatch, or sets it to null when no more rows are available.
-  private void load(VectorSchemaRoot root) throws SQLException {
-
-    for (int i = 1; i <= consumers.length; i++) {
-      consumers[i - 1].resetValueVector(root.getVector(rsmd.getColumnName(i)));
-    }
-
-    consumeData(root);
-
-    if (root.getRowCount() == 0) {
-      root.close();
-      nextBatch = null;
-    } else {
-      nextBatch = root;
-    }
-  }
-
-  @Override
-  public boolean hasNext() {
-    return nextBatch != null;
-  }
-
-  /**
-   * Gets the next vector batch. The caller is responsible for freeing its resources.
-   */
-  @Override
-  public VectorSchemaRoot next() {
-    Preconditions.checkArgument(hasNext());
-    VectorSchemaRoot returned = nextBatch;
-    try {
-      load(createVectorSchemaRoot());
-    } catch (Exception e) {
-      close();
-      throw new RuntimeException("Error occurred while getting next schema root.", e);
-    }
-    return returned;
-  }
-
-  /**
-   * Clean up resources.
-   */
-  @Override
-  public void close() {
-    if (nextBatch != null) {
-      nextBatch.close();
-    }
-    compositeConsumer.close();
-  }
-}
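
As context for the API removed above: ArrowVectorIterator was consumed batch by batch, with the caller owning each returned VectorSchemaRoot. A minimal usage sketch, assuming an already-open ResultSet, a JdbcToArrowConfig, and a hypothetical per-batch consumer process():

    try (ArrowVectorIterator it = ArrowVectorIterator.create(resultSet, config)) {
      while (it.hasNext()) {
        // Each returned root is owned by the caller and must be closed.
        try (VectorSchemaRoot root = it.next()) {
          process(root); // hypothetical consumer of one batch
        }
      }
    }
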
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java
deleted file mode 100644
index aaadacb..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-/**
- * String constants used for metadata returned on Vectors.
- */
-public class Constants {
-  private Constants() {}
-
-  public static final String SQL_CATALOG_NAME_KEY = "SQL_CATALOG_NAME";
-  public static final String SQL_TABLE_NAME_KEY = "SQL_TABLE_NAME";
-  public static final String SQL_COLUMN_NAME_KEY = "SQL_COLUMN_NAME";
-  public static final String SQL_TYPE_KEY = "SQL_TYPE";
-
-}
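
These keys are written into Arrow Field metadata when the conversion runs with includeMetadata enabled. A hedged lookup sketch, assuming a VectorSchemaRoot named root that was produced by this adapter:

    Field field = root.getSchema().getFields().get(0);
    String table = field.getMetadata().get(Constants.SQL_TABLE_NAME_KEY);
    String column = field.getMetadata().get(Constants.SQL_COLUMN_NAME_KEY);
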
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java
deleted file mode 100644
index e3747bb..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.Types;
-
-import org.apache.arrow.util.Preconditions;
-
-/**
- * This class represents the information about a JDBC ResultSet Field that is
- * needed to construct an {@link org.apache.arrow.vector.types.pojo.ArrowType}.
- * Currently, this is:
- * <ul>
- *   <li>The JDBC {@link java.sql.Types} type.</li>
- *   <li>The field's precision (used for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} types)</li>
- *   <li>The field's scale (used for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} types)</li>
- * </ul>
- */
-public class JdbcFieldInfo {
-  private final int jdbcType;
-  private final int precision;
-  private final int scale;
-
-  /**
-   * Builds a <code>JdbcFieldInfo</code> using only the {@link java.sql.Types} type.  This constructor
-   * rejects {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} fields, which require a
-   * precision and scale; for all other types, the precision and scale are set to <code>0</code>.
-   *
-   * @param jdbcType The {@link java.sql.Types} type.
-   * @throws IllegalArgumentException if jdbcType is {@link java.sql.Types#DECIMAL} or {@link java.sql.Types#NUMERIC}.
-   */
-  public JdbcFieldInfo(int jdbcType) {
-    Preconditions.checkArgument(
-        (jdbcType != Types.DECIMAL && jdbcType != Types.NUMERIC),
-        "DECIMAL and NUMERIC types require a precision and scale; please use another constructor.");
-
-    this.jdbcType = jdbcType;
-    this.precision = 0;
-    this.scale = 0;
-  }
-
-  /**
-   * Builds a <code>JdbcFieldInfo</code> from the {@link java.sql.Types} type, precision, and scale.
-   * Use this constructor for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} types.
-   *
-   * @param jdbcType The {@link java.sql.Types} type.
-   * @param precision The field's numeric precision.
-   * @param scale The field's numeric scale.
-   */
-  public JdbcFieldInfo(int jdbcType, int precision, int scale) {
-    this.jdbcType = jdbcType;
-    this.precision = precision;
-    this.scale = scale;
-  }
-
-  /**
-   * Builds a <code>JdbcFieldInfo</code> from the corresponding {@link java.sql.ResultSetMetaData} column.
-   *
-   * @param rsmd The {@link java.sql.ResultSetMetaData} to get the field information from.
-   * @param column The column to get the field information for (1-based index).
-   * @throws SQLException If the column information cannot be retrieved.
-   * @throws NullPointerException if <code>rsmd</code> is <code>null</code>.
-   * @throws IllegalArgumentException if <code>column</code> is out of bounds.
-   */
-  public JdbcFieldInfo(ResultSetMetaData rsmd, int column) throws SQLException {
-    Preconditions.checkNotNull(rsmd, "ResultSetMetaData cannot be null.");
-    Preconditions.checkArgument(column > 0, "ResultSetMetaData columns have indices starting at 1.");
-    Preconditions.checkArgument(
-        column <= rsmd.getColumnCount(),
-        "The index must be within the number of columns (1 to %s, inclusive)", rsmd.getColumnCount());
-
-    this.jdbcType = rsmd.getColumnType(column);
-    this.precision = rsmd.getPrecision(column);
-    this.scale = rsmd.getScale(column);
-  }
-
-  /**
-   * The {@link java.sql.Types} type.
-   */
-  public int getJdbcType() {
-    return jdbcType;
-  }
-
-  /**
-   * The numeric precision, for {@link java.sql.Types#NUMERIC}  and {@link java.sql.Types#DECIMAL} types.
-   */
-  public int getPrecision() {
-    return precision;
-  }
-
-  /**
-   * The numeric scale, for {@link java.sql.Types#NUMERIC}  and {@link java.sql.Types#DECIMAL} types.
-   */
-  public int getScale() {
-    return scale;
-  }
-}
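
To illustrate the constructor contract documented above (a sketch, not taken from the removed sources):

    // Non-decimal types carry no precision or scale.
    JdbcFieldInfo intField = new JdbcFieldInfo(java.sql.Types.INTEGER);

    // DECIMAL/NUMERIC must supply precision and scale, e.g. DECIMAL(18, 2).
    JdbcFieldInfo decField = new JdbcFieldInfo(java.sql.Types.DECIMAL, 18, 2);

    // Throws IllegalArgumentException: DECIMAL requires precision and scale.
    // JdbcFieldInfo bad = new JdbcFieldInfo(java.sql.Types.DECIMAL);
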
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java
deleted file mode 100644
index c65523d..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Calendar;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.memory.RootAllocator;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.util.ValueVectorUtility;
-
-/**
- * Utility class to convert JDBC objects to columnar Arrow format objects.
- *
- * <p>This utility uses the following mapping from JDBC/SQL data types to Arrow data types.
- *
- * <p>CHAR --> ArrowType.Utf8
- * NCHAR --> ArrowType.Utf8
- * VARCHAR --> ArrowType.Utf8
- * NVARCHAR --> ArrowType.Utf8
- * LONGVARCHAR --> ArrowType.Utf8
- * LONGNVARCHAR --> ArrowType.Utf8
- * NUMERIC --> ArrowType.Decimal(precision, scale)
- * DECIMAL --> ArrowType.Decimal(precision, scale)
- * BIT --> ArrowType.Bool
- * TINYINT --> ArrowType.Int(8, signed)
- * SMALLINT --> ArrowType.Int(16, signed)
- * INTEGER --> ArrowType.Int(32, signed)
- * BIGINT --> ArrowType.Int(64, signed)
- * REAL --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)
- * FLOAT --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)
- * DOUBLE --> ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)
- * BINARY --> ArrowType.Binary
- * VARBINARY --> ArrowType.Binary
- * LONGVARBINARY --> ArrowType.Binary
- * DATE --> ArrowType.Date(DateUnit.DAY)
- * TIME --> ArrowType.Time(TimeUnit.MILLISECOND, 32)
- * TIMESTAMP --> ArrowType.Timestamp(TimeUnit.MILLISECOND, timezone=null)
- * CLOB --> ArrowType.Utf8
- * BLOB --> ArrowType.Binary
- *
- * @since 0.10.0
- */
-public class JdbcToArrow {
-
-  /**
-   * For the given SQL query, execute and fetch the data from the relational DB and convert it to Arrow objects.
-   * This method uses the default Calendar instance, with the default TimeZone and Locale as returned by the JVM.
-   * If you wish to use a specific TimeZone or Locale for any Date, Time, or Timestamp datasets, use the
-   * overloaded API that takes a Calendar object instance.
-   *
-   * @param connection Database connection to be used. This method will not close the passed connection object. Since
-   *                   the caller has passed the connection object it's the responsibility of the caller to close or
-   *                   return the connection to the pool.
-   * @param query      The DB Query to fetch the data.
-   * @param allocator  Memory allocator
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException Propagate any SQL Exceptions to the caller after closing any resources opened such as
-   *                      ResultSet and Statement objects.
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(Connection connection, String query, BufferAllocator allocator)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(allocator, "Memory allocator object can not be null");
-
-    JdbcToArrowConfig config =
-        new JdbcToArrowConfig(allocator, JdbcToArrowUtils.getUtcCalendar());
-    return sqlToArrow(connection, query, config);
-  }
-
-  /**
-   * For the given SQL query, execute and fetch the data from the relational DB and convert it to Arrow objects.
-   *
-   * @param connection Database connection to be used. This method will not close the passed connection object. Since
-   *                   the caller has passed the connection object it's the responsibility of the caller to close or
-   *                   return the connection to the pool.
-   * @param query      The DB Query to fetch the data.
-   * @param allocator  Memory allocator
-   * @param calendar   Calendar object to use to handle Date, Time and Timestamp datasets.
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException Propagate any SQL Exceptions to the caller after closing any resources opened such as
-   *                      ResultSet and Statement objects.
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(
-      Connection connection,
-      String query,
-      BufferAllocator allocator,
-      Calendar calendar) throws SQLException, IOException {
-
-    Preconditions.checkNotNull(allocator, "Memory allocator object can not be null");
-    Preconditions.checkNotNull(calendar, "Calendar object can not be null");
-
-    return sqlToArrow(connection, query, new JdbcToArrowConfig(allocator, calendar));
-  }
-
-  /**
-   * For the given SQL query, execute and fetch the data from the relational DB and convert it to Arrow objects.
-   *
-   * @param connection Database connection to be used. This method will not close the passed connection object.
-   *                   Since the caller has passed the connection object it's the responsibility of the caller
-   *                   to close or return the connection to the pool.
-   * @param query      The DB Query to fetch the data.
-   * @param config     Configuration
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException Propagate any SQL Exceptions to the caller after closing any resources opened such as
-   *                      ResultSet and Statement objects.
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(Connection connection, String query, JdbcToArrowConfig config)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(connection, "JDBC connection object can not be null");
-    Preconditions.checkArgument(query != null && query.length() > 0, "SQL query can not be null or empty");
-
-    try (Statement stmt = connection.createStatement()) {
-      return sqlToArrow(stmt.executeQuery(query), config);
-    }
-  }
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects. This
-   * method uses the default RootAllocator and Calendar object.
-   *
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException on error
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(ResultSet resultSet) throws SQLException, IOException {
-    Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null");
-
-    return sqlToArrow(resultSet, JdbcToArrowUtils.getUtcCalendar());
-  }
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects.
-   *
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @param allocator Memory allocator
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException on error
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(ResultSet resultSet, BufferAllocator allocator)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(allocator, "Memory Allocator object can not be null");
-
-    JdbcToArrowConfig config =
-        new JdbcToArrowConfig(allocator, JdbcToArrowUtils.getUtcCalendar());
-    return sqlToArrow(resultSet, config);
-  }
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects.
-   *
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @param calendar  Calendar instance to use for Date, Time and Timestamp datasets, or <code>null</code> if none.
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException on error
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(ResultSet resultSet, Calendar calendar) throws SQLException, IOException {
-    Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null");
-    return sqlToArrow(resultSet, new JdbcToArrowConfig(new RootAllocator(Integer.MAX_VALUE), calendar));
-  }
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects.
-   *
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @param allocator Memory allocator to use.
-   * @param calendar  Calendar instance to use for Date, Time and Timestamp datasets, or <code>null</code> if none.
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException on error
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(
-      ResultSet resultSet,
-      BufferAllocator allocator,
-      Calendar calendar)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(allocator, "Memory Allocator object can not be null");
-
-    return sqlToArrow(resultSet, new JdbcToArrowConfig(allocator, calendar));
-  }
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects.
-   *
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @param config    Configuration of the conversion from JDBC to Arrow.
-   * @return Arrow Data Objects {@link VectorSchemaRoot}
-   * @throws SQLException on error
-   */
-  @Deprecated
-  public static VectorSchemaRoot sqlToArrow(ResultSet resultSet, JdbcToArrowConfig config)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null");
-    Preconditions.checkNotNull(config, "The configuration cannot be null");
-
-    VectorSchemaRoot root = VectorSchemaRoot.create(
-        JdbcToArrowUtils.jdbcToArrowSchema(resultSet.getMetaData(), config), config.getAllocator());
-    if (config.getTargetBatchSize() != JdbcToArrowConfig.NO_LIMIT_BATCH_SIZE) {
-      ValueVectorUtility.preAllocate(root, config.getTargetBatchSize());
-    }
-    JdbcToArrowUtils.jdbcToArrowVectors(resultSet, root, config);
-    return root;
-  }
-
-  /*----------------------------------------------------------------*
-   |                                                                |
-   |                       Partial Convert API                      |
-   |                                                                |
-   *----------------------------------------------------------------*/
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects.
-   * Note that this method uses the default targetBatchSize of 1024.
-   *
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @param allocator Memory allocator
-   * @return Arrow Data Objects {@link ArrowVectorIterator}
-   * @throws SQLException on error
-   */
-  public static ArrowVectorIterator sqlToArrowVectorIterator(
-      ResultSet resultSet,
-      BufferAllocator allocator)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(allocator, "Memory Allocator object can not be null");
-
-    JdbcToArrowConfig config =
-        new JdbcToArrowConfig(allocator, JdbcToArrowUtils.getUtcCalendar());
-    return sqlToArrowVectorIterator(resultSet, config);
-  }
-
-  /**
-   * For the given JDBC {@link ResultSet}, fetch the data from the relational DB and convert it to Arrow objects.
-   * Note that if {@link JdbcToArrowConfig#targetBatchSize} is not specified, the default value of 1024 is used.
-   * @param resultSet ResultSet to use to fetch the data from underlying database
-   * @param config    Configuration of the conversion from JDBC to Arrow.
-   * @return Arrow Data Objects {@link ArrowVectorIterator}
-   * @throws SQLException on error
-   */
-  public static ArrowVectorIterator sqlToArrowVectorIterator(
-      ResultSet resultSet,
-      JdbcToArrowConfig config)
-      throws SQLException, IOException {
-    Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null");
-    Preconditions.checkNotNull(config, "The configuration cannot be null");
-    return ArrowVectorIterator.create(resultSet, config);
-  }
-}
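
For reference, sqlToArrowVectorIterator was the non-deprecated entry point into this adapter. An end-to-end sketch; the H2 URL, table, and query are illustrative assumptions, not taken from the removed tests:

    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:arrow"); // assumed URL
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT id, name FROM people");    // assumed table
         BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
         ArrowVectorIterator it = JdbcToArrow.sqlToArrowVectorIterator(rs, allocator)) {
      while (it.hasNext()) {
        try (VectorSchemaRoot root = it.next()) {
          System.out.println(root.contentToTSVString()); // print each batch (default size 1024)
        }
      }
    }
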
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java
deleted file mode 100644
index 9a587e7..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-import static org.apache.arrow.vector.types.FloatingPointPrecision.DOUBLE;
-import static org.apache.arrow.vector.types.FloatingPointPrecision.SINGLE;
-
-import java.sql.Types;
-import java.util.Calendar;
-import java.util.Map;
-import java.util.function.Function;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.types.DateUnit;
-import org.apache.arrow.vector.types.TimeUnit;
-import org.apache.arrow.vector.types.pojo.ArrowType;
-
-/**
- * This class configures the JDBC-to-Arrow conversion process.
- * <p>
- * The allocator is used to construct the {@link org.apache.arrow.vector.VectorSchemaRoot},
- * and the calendar is used to define the time zone of any
- * {@link org.apache.arrow.vector.types.pojo.ArrowType.Timestamp}
- * fields that are created during the conversion.  Neither field may be <code>null</code>.
- * </p>
- * <p>
- * If the <code>includeMetadata</code> flag is set, the Arrow field metadata will contain information
- * from the corresponding {@link java.sql.ResultSetMetaData} that was used to create the
- * {@link org.apache.arrow.vector.types.pojo.FieldType} of the corresponding
- * {@link org.apache.arrow.vector.FieldVector}.
- * </p>
- * <p>
- * If there are any {@link java.sql.Types#ARRAY} fields in the {@link java.sql.ResultSet}, the corresponding
- * {@link JdbcFieldInfo} for the array's contents must be defined here.  Unfortunately, the sub-type
- * information cannot be retrieved from all JDBC implementations (H2 for example, returns
- * {@link java.sql.Types#NULL} for the array sub-type), so it must be configured here.  The column index
- * or name can be used to map to a {@link JdbcFieldInfo}, and that will be used for the conversion.
- * </p>
- */
-public final class JdbcToArrowConfig {
-
-  private final Calendar calendar;
-  private final BufferAllocator allocator;
-  private final boolean includeMetadata;
-  private final Map<Integer, JdbcFieldInfo> arraySubTypesByColumnIndex;
-  private final Map<String, JdbcFieldInfo> arraySubTypesByColumnName;
-
-  public static final int DEFAULT_TARGET_BATCH_SIZE = 1024;
-  public static final int NO_LIMIT_BATCH_SIZE = -1;
-
-  /**
-   * The maximum row count to read in each batch when partially converting data.
-   * The default value is 1024; a value of -1 disables partial reads.
-   * Note that this flag is only used by {@link JdbcToArrow#sqlToArrowVectorIterator}:
-   * 1) if targetBatchSize != -1, the full data set is converted into multiple vectors
-   * with a valueCount of no more than targetBatchSize;
-   * 2) if targetBatchSize == -1, the full data set is converted into a single vector in {@link ArrowVectorIterator}.
-   */
-  private final int targetBatchSize;
-
-  private final Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter;
-
-  /**
-   * Constructs a new configuration from the provided allocator and calendar.  The <code>allocator</code>
-   * is used when constructing the Arrow vectors from the ResultSet, and the calendar is used to define
-   * Arrow Timestamp fields, and to read time-based fields from the JDBC <code>ResultSet</code>. 
-   *
-   * @param allocator       The memory allocator to construct the Arrow vectors with.
-   * @param calendar        The calendar to use when constructing Timestamp fields and reading time-based results.
-   */
-  JdbcToArrowConfig(BufferAllocator allocator, Calendar calendar) {
-    this(allocator, calendar, false, null, null, DEFAULT_TARGET_BATCH_SIZE, null);
-  }
-
-  /**
-   * Constructs a new configuration from the provided allocator and calendar.  The <code>allocator</code>
-   * is used when constructing the Arrow vectors from the ResultSet, and the calendar is used to define
-   * Arrow Timestamp fields, and to read time-based fields from the JDBC <code>ResultSet</code>. 
-   *
-   * @param allocator       The memory allocator to construct the Arrow vectors with.
-   * @param calendar        The calendar to use when constructing Timestamp fields and reading time-based results.
-   * @param includeMetadata Whether to include JDBC field metadata in the Arrow Schema Field metadata.
-   * @param arraySubTypesByColumnIndex The type of the JDBC array at the column index (1-based).
-   * @param arraySubTypesByColumnName  The type of the JDBC array at the column name.
-   * @param jdbcToArrowTypeConverter The function that maps JDBC field type information to arrow type. If set to null,
-   *                                 the default mapping will be used, which is defined as:
-   *  <ul>
-   *    <li>CHAR --> ArrowType.Utf8</li>
-   *    <li>NCHAR --> ArrowType.Utf8</li>
-   *    <li>VARCHAR --> ArrowType.Utf8</li>
-   *    <li>NVARCHAR --> ArrowType.Utf8</li>
-   *    <li>LONGVARCHAR --> ArrowType.Utf8</li>
-   *    <li>LONGNVARCHAR --> ArrowType.Utf8</li>
-   *    <li>NUMERIC --> ArrowType.Decimal(precision, scale)</li>
-   *    <li>DECIMAL --> ArrowType.Decimal(precision, scale)</li>
-   *    <li>BIT --> ArrowType.Bool</li>
-   *    <li>TINYINT --> ArrowType.Int(8, signed)</li>
-   *    <li>SMALLINT --> ArrowType.Int(16, signed)</li>
-   *    <li>INTEGER --> ArrowType.Int(32, signed)</li>
-   *    <li>BIGINT --> ArrowType.Int(64, signed)</li>
-   *    <li>REAL --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)</li>
-   *    <li>FLOAT --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)</li>
-   *    <li>DOUBLE --> ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)</li>
-   *    <li>BINARY --> ArrowType.Binary</li>
-   *    <li>VARBINARY --> ArrowType.Binary</li>
-   *    <li>LONGVARBINARY --> ArrowType.Binary</li>
-   *    <li>DATE --> ArrowType.Date(DateUnit.DAY)</li>
-   *    <li>TIME --> ArrowType.Time(TimeUnit.MILLISECOND, 32)</li>
-   *    <li>TIMESTAMP --> ArrowType.Timestamp(TimeUnit.MILLISECOND, calendar timezone)</li>
-   *    <li>CLOB --> ArrowType.Utf8</li>
-   *    <li>BLOB --> ArrowType.Binary</li>
-   *    <li>NULL --> ArrowType.Null</li>
-   *  </ul>
-   */
-  JdbcToArrowConfig(
-      BufferAllocator allocator,
-      Calendar calendar,
-      boolean includeMetadata,
-      Map<Integer, JdbcFieldInfo> arraySubTypesByColumnIndex,
-      Map<String, JdbcFieldInfo> arraySubTypesByColumnName,
-      int targetBatchSize,
-      Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter) {
-    Preconditions.checkNotNull(allocator, "Memory allocator cannot be null");
-    this.allocator = allocator;
-    this.calendar = calendar;
-    this.includeMetadata = includeMetadata;
-    this.arraySubTypesByColumnIndex = arraySubTypesByColumnIndex;
-    this.arraySubTypesByColumnName = arraySubTypesByColumnName;
-    this.targetBatchSize = targetBatchSize;
-
-    // set up type converter
-    this.jdbcToArrowTypeConverter = jdbcToArrowTypeConverter != null ? jdbcToArrowTypeConverter :
-        fieldInfo -> {
-          final String timezone;
-          if (calendar != null) {
-            timezone = calendar.getTimeZone().getID();
-          } else {
-            timezone = null;
-          }
-
-          switch (fieldInfo.getJdbcType()) {
-            case Types.BOOLEAN:
-            case Types.BIT:
-              return new ArrowType.Bool();
-            case Types.TINYINT:
-              return new ArrowType.Int(8, true);
-            case Types.SMALLINT:
-              return new ArrowType.Int(16, true);
-            case Types.INTEGER:
-              return new ArrowType.Int(32, true);
-            case Types.BIGINT:
-              return new ArrowType.Int(64, true);
-            case Types.NUMERIC:
-            case Types.DECIMAL:
-              int precision = fieldInfo.getPrecision();
-              int scale = fieldInfo.getScale();
-              return new ArrowType.Decimal(precision, scale, 128);
-            case Types.REAL:
-            case Types.FLOAT:
-              return new ArrowType.FloatingPoint(SINGLE);
-            case Types.DOUBLE:
-              return new ArrowType.FloatingPoint(DOUBLE);
-            case Types.CHAR:
-            case Types.NCHAR:
-            case Types.VARCHAR:
-            case Types.NVARCHAR:
-            case Types.LONGVARCHAR:
-            case Types.LONGNVARCHAR:
-            case Types.CLOB:
-              return new ArrowType.Utf8();
-            case Types.DATE:
-              return new ArrowType.Date(DateUnit.DAY);
-            case Types.TIME:
-              return new ArrowType.Time(TimeUnit.MILLISECOND, 32);
-            case Types.TIMESTAMP:
-              return new ArrowType.Timestamp(TimeUnit.MILLISECOND, timezone);
-            case Types.BINARY:
-            case Types.VARBINARY:
-            case Types.LONGVARBINARY:
-            case Types.BLOB:
-              return new ArrowType.Binary();
-            case Types.ARRAY:
-              return new ArrowType.List();
-            case Types.NULL:
-              return new ArrowType.Null();
-            default:
-              // no-op, shouldn't get here
-              return null;
-          }
-        };
-  }
-
-  /**
-   * The calendar to use when defining Arrow Timestamp fields
-   * and retrieving {@link java.sql.Date}, {@link java.sql.Time}, or {@link java.sql.Timestamp}
-   * data types from the {@link java.sql.ResultSet}, or <code>null</code> if not converting.
-   *
-   * @return the calendar.
-   */
-  public Calendar getCalendar() {
-    return calendar;
-  }
-
-  /**
-   * The Arrow memory allocator.
-   * @return the allocator.
-   */
-  public BufferAllocator getAllocator() {
-    return allocator;
-  }
-
-  /**
-   * Whether to include JDBC ResultSet field metadata in the Arrow Schema field metadata.
-   *
-   * @return <code>true</code> to include field metadata, <code>false</code> to exclude it.
-   */
-  public boolean shouldIncludeMetadata() {
-    return includeMetadata;
-  }
-
-  /**
-   * Get the target batch size for partial read.
-   */
-  public int getTargetBatchSize() {
-    return targetBatchSize;
-  }
-
-  /**
-   * Gets the mapping from JDBC type information to the Arrow type.
-   */
-  public Function<JdbcFieldInfo, ArrowType> getJdbcToArrowTypeConverter() {
-    return jdbcToArrowTypeConverter;
-  }
-
-  /**
-   * Returns the array sub-type {@link JdbcFieldInfo} defined for the provided column index.
-   *
-   * @param index The {@link java.sql.ResultSetMetaData} column index of an {@link java.sql.Types#ARRAY} type.
-   * @return The {@link JdbcFieldInfo} for that array's sub-type, or <code>null</code> if not defined.
-   */
-  public JdbcFieldInfo getArraySubTypeByColumnIndex(int index) {
-    if (arraySubTypesByColumnIndex == null) {
-      return null;
-    } else {
-      return arraySubTypesByColumnIndex.get(index);
-    }
-  }
-
-  /**
-   * Returns the array sub-type {@link JdbcFieldInfo} defined for the provided column name.
-   *
-   * @param name The {@link java.sql.ResultSetMetaData} column name of an {@link java.sql.Types#ARRAY} type.
-   * @return The {@link JdbcFieldInfo} for that array's sub-type, or <code>null</code> if not defined.
-   */
-  public JdbcFieldInfo getArraySubTypeByColumnName(String name) {
-    if (arraySubTypesByColumnName == null) {
-      return null;
-    } else {
-      return arraySubTypesByColumnName.get(name);
-    }
-  }
-}
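
The jdbcToArrowTypeConverter hook above is the extension point for overriding the default type mapping. A sketch of a custom converter; it must cover every JDBC type it will see, and the builder setter used to install it is not shown in this diff, so treat the wiring as an assumption:

    Function<JdbcFieldInfo, ArrowType> converter = fieldInfo -> {
      switch (fieldInfo.getJdbcType()) {
        case java.sql.Types.TIMESTAMP:
          // Deviate from the default: microsecond precision, fixed UTC zone.
          return new ArrowType.Timestamp(TimeUnit.MICROSECOND, "UTC");
        case java.sql.Types.VARCHAR:
          return new ArrowType.Utf8();
        default:
          throw new UnsupportedOperationException(
              "No mapping defined for JDBC type " + fieldInfo.getJdbcType());
      }
    };
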
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java
deleted file mode 100644
index cf08735..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-import static org.apache.arrow.adapter.jdbc.JdbcToArrowConfig.DEFAULT_TARGET_BATCH_SIZE;
-
-import java.util.Calendar;
-import java.util.Map;
-import java.util.function.Function;
-
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.types.pojo.ArrowType;
-
-/**
- * This class builds {@link JdbcToArrowConfig}s.
- */
-public class JdbcToArrowConfigBuilder {
-  private Calendar calendar;
-  private BufferAllocator allocator;
-  private boolean includeMetadata;
-  private Map<Integer, JdbcFieldInfo> arraySubTypesByColumnIndex;
-  private Map<String, JdbcFieldInfo> arraySubTypesByColumnName;
-
-  private int targetBatchSize;
-  private Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter;
-
-  /**
-   * Default constructor for the <code>JdbcToArrowConfigBuilder</code>.
-   * Use the setter methods for the allocator and calendar; the allocator must be
-   * set.  Otherwise, {@link #build()} will throw a {@link NullPointerException}.
-   */
-  public JdbcToArrowConfigBuilder() {
-    this.allocator = null;
-    this.calendar = null;
-    this.includeMetadata = false;
-    this.arraySubTypesByColumnIndex = null;
-    this.arraySubTypesByColumnName = null;
-  }
-
-  /**
-   * Constructor for the <code>JdbcToArrowConfigBuilder</code>.  The
-   * allocator is required, and a {@link NullPointerException}
-   * will be thrown if it is <code>null</code>.
-   * <p>
-   * The allocator is used to construct Arrow vectors from the JDBC ResultSet.
-   * The calendar is used to determine the time zone of {@link java.sql.Timestamp}
-   * fields and convert {@link java.sql.Date}, {@link java.sql.Time}, and
-   * {@link java.sql.Timestamp} fields to a single, common time zone when reading
-   * from the result set.
-   * </p>
-   *
-   * @param allocator The Arrow Vector memory allocator.
-   * @param calendar The calendar to use when constructing timestamp fields.
-   */
-  public JdbcToArrowConfigBuilder(BufferAllocator allocator, Calendar calendar) {
-    this();
-
-    Preconditions.checkNotNull(allocator, "Memory allocator cannot be null");
-
-    this.allocator = allocator;
-    this.calendar = calendar;
-    this.includeMetadata = false;
-    this.targetBatchSize = DEFAULT_TARGET_BATCH_SIZE;
-  }
-
-  /**
-   * Constructor for the <code>JdbcToArrowConfigBuilder</code>.  The
-   * allocator is required; a {@link NullPointerException} will be
-   * thrown if it is <code>null</code>.
-   * <p>
-   * The allocator is used to construct Arrow vectors from the JDBC ResultSet.
-   * The calendar is used to determine the time zone of {@link java.sql.Timestamp}
-   * fields and convert {@link java.sql.Date}, {@link java.sql.Time}, and
-   * {@link java.sql.Timestamp} fields to a single, common time zone when reading
-   * from the result set.
-   * </p>
-   * <p>
-   * The <code>includeMetadata</code> argument, if <code>true</code> will cause
-   * various information about each database field to be added to the Vector
-   * Schema's field metadata.
-   * </p>
-   *
-   * @param allocator The Arrow Vector memory allocator.
-   * @param calendar The calendar to use when constructing timestamp fields.
-   * @param includeMetadata Whether to include JDBC ResultSet field metadata in the Arrow Schema field metadata.
-   */
-  public JdbcToArrowConfigBuilder(BufferAllocator allocator, Calendar calendar, boolean includeMetadata) {
-    this(allocator, calendar);
-    this.includeMetadata = includeMetadata;
-  }
-
-  /**
-   * Sets the memory allocator to use when constructing the Arrow vectors from the ResultSet.
-   *
-   * @param allocator the allocator to set.
-   * @exception NullPointerException if <code>allocator</code> is null.
-   */
-  public JdbcToArrowConfigBuilder setAllocator(BufferAllocator allocator) {
-    Preconditions.checkNotNull(allocator, "Memory allocator cannot be null");
-    this.allocator = allocator;
-    return this;
-  }
-
-  /**
-   * Sets the {@link Calendar} to use when constructing timestamp fields in the
-   * Arrow schema, and reading time-based fields from the JDBC <code>ResultSet</code>.
-   *
-   * @param calendar the calendar to set.
-   */
-  public JdbcToArrowConfigBuilder setCalendar(Calendar calendar) {
-    this.calendar = calendar;
-    return this;
-  }
-
-  /**
-   * Sets whether to include JDBC ResultSet field metadata in the Arrow Schema field metadata.
-   *
-   * @param includeMetadata Whether to include or exclude JDBC metadata in the Arrow Schema field metadata.
-   * @return This instance of the <code>JdbcToArrowConfigBuilder</code>, for chaining.
-   */
-  public JdbcToArrowConfigBuilder setIncludeMetadata(boolean includeMetadata) {
-    this.includeMetadata = includeMetadata;
-    return this;
-  }
-
-  /**
-   * Sets the mapping of column-index-to-{@link JdbcFieldInfo} used for columns of type {@link java.sql.Types#ARRAY}.
-   * The column index is 1-based, to match the JDBC column index.
-   *
-   * @param map The mapping.
-   * @return This instance of the <code>JdbcToArrowConfigBuilder</code>, for chaining.
-   */
-  public JdbcToArrowConfigBuilder setArraySubTypeByColumnIndexMap(Map<Integer, JdbcFieldInfo> map) {
-    this.arraySubTypesByColumnIndex = map;
-    return this;
-  }
-
-  /**
-   * Sets the mapping of column-name-to-{@link JdbcFieldInfo} used for columns of type {@link java.sql.Types#ARRAY}.
-   *
-   * @param map The mapping.
-   * @return This instance of the <code>JdbcToArrowConfigBuilder</code>, for chaining.
-   */
-  public JdbcToArrowConfigBuilder setArraySubTypeByColumnNameMap(Map<String, JdbcFieldInfo> map) {
-    this.arraySubTypesByColumnName = map;
-    return this;
-  }
-
-  /**
-   * Sets the target batch size for partial reads.
-   *
-   * @param targetBatchSize The target batch size.
-   * @return This instance of the <code>JdbcToArrowConfigBuilder</code>, for chaining.
-   */
-  public JdbcToArrowConfigBuilder setTargetBatchSize(int targetBatchSize) {
-    this.targetBatchSize = targetBatchSize;
-    return this;
-  }
-
-  /**
-   * Sets the function used to map JDBC field information to Arrow types.
-   *
-   * @param jdbcToArrowTypeConverter The type-converter function.
-   * @return This instance of the <code>JdbcToArrowConfigBuilder</code>, for chaining.
-   */
-  public JdbcToArrowConfigBuilder setJdbcToArrowTypeConverter(
-      Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter) {
-    this.jdbcToArrowTypeConverter = jdbcToArrowTypeConverter;
-    return this;
-  }
-
-  /**
-   * This builds the {@link JdbcToArrowConfig} from the provided
-   * {@link BufferAllocator} and {@link Calendar}.
-   *
-   * @return The built {@link JdbcToArrowConfig}
-   * @throws NullPointerException if either the allocator or calendar was not set.
-   */
-  public JdbcToArrowConfig build() {
-    return new JdbcToArrowConfig(
-        allocator,
-        calendar,
-        includeMetadata,
-        arraySubTypesByColumnIndex,
-        arraySubTypesByColumnName,
-        targetBatchSize,
-        jdbcToArrowTypeConverter);
-  }
-}
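
A minimal usage sketch of the builder removed above, assuming the arrow-memory and arrow-vector
artifacts are on the classpath; all classes and methods used here appear in this diff.

    import java.util.Calendar;

    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
    import org.apache.arrow.memory.BufferAllocator;
    import org.apache.arrow.memory.RootAllocator;

    public class ConfigBuilderSketch {
      static JdbcToArrowConfig buildConfig() {
        // The allocator is required; the calendar here is the UTC/root-locale
        // calendar provided by JdbcToArrowUtils.
        BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
        Calendar calendar = JdbcToArrowUtils.getUtcCalendar();
        return new JdbcToArrowConfigBuilder(allocator, calendar)
            .setIncludeMetadata(true)
            .setTargetBatchSize(1024) // 1024 is an arbitrary example size
            .build();
      }
    }
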
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java
deleted file mode 100644
index 8361c11..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc;
-
-import java.io.IOException;
-import java.sql.Date;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.TimeZone;
-
-import org.apache.arrow.adapter.jdbc.consumer.ArrayConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.BigIntConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.BinaryConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.BitConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.CompositeJdbcConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.DateConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.DecimalConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.DoubleConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.FloatConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.IntConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.JdbcConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.NullConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.SmallIntConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.TimeConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.TimestampConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.TimestampTZConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.TinyIntConsumer;
-import org.apache.arrow.adapter.jdbc.consumer.VarCharConsumer;
-import org.apache.arrow.memory.RootAllocator;
-import org.apache.arrow.util.Preconditions;
-import org.apache.arrow.vector.BigIntVector;
-import org.apache.arrow.vector.BitVector;
-import org.apache.arrow.vector.DateDayVector;
-import org.apache.arrow.vector.DecimalVector;
-import org.apache.arrow.vector.FieldVector;
-import org.apache.arrow.vector.Float4Vector;
-import org.apache.arrow.vector.Float8Vector;
-import org.apache.arrow.vector.IntVector;
-import org.apache.arrow.vector.NullVector;
-import org.apache.arrow.vector.SmallIntVector;
-import org.apache.arrow.vector.TimeMilliVector;
-import org.apache.arrow.vector.TimeStampMilliTZVector;
-import org.apache.arrow.vector.TimeStampMilliVector;
-import org.apache.arrow.vector.TinyIntVector;
-import org.apache.arrow.vector.VarBinaryVector;
-import org.apache.arrow.vector.VarCharVector;
-import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.complex.ListVector;
-import org.apache.arrow.vector.types.pojo.ArrowType;
-import org.apache.arrow.vector.types.pojo.Field;
-import org.apache.arrow.vector.types.pojo.FieldType;
-import org.apache.arrow.vector.types.pojo.Schema;
-import org.apache.arrow.vector.util.ValueVectorUtility;
-
-/**
- * Class that does most of the work to convert JDBC ResultSet data into Arrow columnar format Vector objects.
- *
- * @since 0.10.0
- */
-public class JdbcToArrowUtils {
-
-  private static final int JDBC_ARRAY_VALUE_COLUMN = 2;
-
-  /**
-   * Returns an instance of a {@link java.util.Calendar} with the UTC time zone and root locale.
-   */
-  public static Calendar getUtcCalendar() {
-    return Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
-  }
-
-  /**
-   * Creates an Arrow {@link Schema} object for the given JDBC {@link ResultSetMetaData}.
-   *
-   * @param rsmd The ResultSetMetaData containing the results, to read the JDBC metadata from.
-   * @param calendar The calendar whose time zone is used when constructing Timestamp fields.
-   * @return {@link Schema}
-   * @throws SQLException on error
-   */
-  public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, Calendar calendar) throws SQLException {
-    Preconditions.checkNotNull(calendar, "Calendar object can't be null");
-
-    return jdbcToArrowSchema(rsmd, new JdbcToArrowConfig(new RootAllocator(0), calendar));
-  }
-
-  /**
-   * Creates an Arrow {@link Schema} object for the given JDBC {@link java.sql.ResultSetMetaData}.
-   *
-   * <p>
-   * If {@link JdbcToArrowConfig#shouldIncludeMetadata()} returns <code>true</code>, the following fields
-   * will be added to the {@link FieldType#getMetadata()}:
-   * <ul>
-   *  <li>{@link Constants#SQL_CATALOG_NAME_KEY} representing {@link ResultSetMetaData#getCatalogName(int)}</li>
-   *  <li>{@link Constants#SQL_TABLE_NAME_KEY} representing {@link ResultSetMetaData#getTableName(int)}</li>
-   *  <li>{@link Constants#SQL_COLUMN_NAME_KEY} representing {@link ResultSetMetaData#getColumnName(int)}</li>
-   *  <li>{@link Constants#SQL_TYPE_KEY} representing {@link ResultSetMetaData#getColumnTypeName(int)}</li>
-   * </ul>
-   * </p>
-   * <p>
-   * If any columns are of type {@link java.sql.Types#ARRAY}, the configuration object will be used to look up
-   * the array sub-type field.  The {@link JdbcToArrowConfig#getArraySubTypeByColumnIndex(int)} method will be
-   * checked first, followed by the {@link JdbcToArrowConfig#getArraySubTypeByColumnName(String)} method.
-   * </p>
-   *
-   * @param rsmd The ResultSetMetaData containing the results, to read the JDBC metadata from.
-   * @param config The configuration to use when constructing the schema.
-   * @return {@link Schema}
-   * @throws SQLException on error
-   * @throws IllegalArgumentException if <code>rsmd</code> contains an {@link java.sql.Types#ARRAY} but the
-   *                                  <code>config</code> does not have a sub-type definition for it.
-   */
-  public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig config) throws SQLException {
-    Preconditions.checkNotNull(rsmd, "JDBC ResultSetMetaData object can't be null");
-    Preconditions.checkNotNull(config, "The configuration object must not be null");
-
-    List<Field> fields = new ArrayList<>();
-    int columnCount = rsmd.getColumnCount();
-    for (int i = 1; i <= columnCount; i++) {
-      final String columnName = rsmd.getColumnName(i);
-
-      final Map<String, String> metadata;
-      if (config.shouldIncludeMetadata()) {
-        metadata = new HashMap<>();
-        metadata.put(Constants.SQL_CATALOG_NAME_KEY, rsmd.getCatalogName(i));
-        metadata.put(Constants.SQL_TABLE_NAME_KEY, rsmd.getTableName(i));
-        metadata.put(Constants.SQL_COLUMN_NAME_KEY, columnName);
-        metadata.put(Constants.SQL_TYPE_KEY, rsmd.getColumnTypeName(i));
-
-      } else {
-        metadata = null;
-      }
-
-      final ArrowType arrowType = config.getJdbcToArrowTypeConverter().apply(new JdbcFieldInfo(rsmd, i));
-      if (arrowType != null) {
-        final FieldType fieldType = new FieldType(true, arrowType, /* dictionary encoding */ null, metadata);
-
-        List<Field> children = null;
-        if (arrowType.getTypeID() == ArrowType.List.TYPE_TYPE) {
-          final JdbcFieldInfo arrayFieldInfo = getJdbcFieldInfoForArraySubType(rsmd, i, config);
-          if (arrayFieldInfo == null) {
-            throw new IllegalArgumentException("Configuration does not provide a mapping for array column " + i);
-          }
-          children = new ArrayList<Field>();
-          final ArrowType childType = config.getJdbcToArrowTypeConverter().apply(arrayFieldInfo);
-          children.add(new Field("child", FieldType.nullable(childType), null));
-        }
-
-        fields.add(new Field(columnName, fieldType, children));
-      }
-    }
-
-    return new Schema(fields, null);
-  }
-
-  /* Uses the configuration to determine what the array sub-type JdbcFieldInfo is.
-   * If no sub-type can be found, returns null.
-   */
-  private static JdbcFieldInfo getJdbcFieldInfoForArraySubType(
-      ResultSetMetaData rsmd,
-      int arrayColumn,
-      JdbcToArrowConfig config)
-          throws SQLException {
-
-    Preconditions.checkNotNull(rsmd, "ResultSet MetaData object cannot be null");
-    Preconditions.checkNotNull(config, "Configuration must not be null");
-    Preconditions.checkArgument(
-        arrayColumn > 0,
-        "ResultSetMetaData columns start with 1; column cannot be less than 1");
-    Preconditions.checkArgument(
-        arrayColumn <= rsmd.getColumnCount(),
-        "Column number cannot be more than the number of columns");
-
-    JdbcFieldInfo fieldInfo = config.getArraySubTypeByColumnIndex(arrayColumn);
-    if (fieldInfo == null) {
-      fieldInfo = config.getArraySubTypeByColumnName(rsmd.getColumnName(arrayColumn));
-    }
-    return fieldInfo;
-  }
-
-  /**
-   * Iterates over the given JDBC {@link ResultSet} to fetch the data and transpose it into
-   * the given Arrow vectors.
-   *
-   * @param rs       ResultSet to use to fetch the data from underlying database
-   * @param root     Arrow {@link VectorSchemaRoot} object to populate
-   * @param calendar The calendar to use when reading {@link Date}, {@link Time}, or {@link Timestamp}
-   *                 data types from the {@link ResultSet}, or <code>null</code> if not converting.
-   * @throws SQLException on JDBC access error
-   * @throws IOException on I/O error
-   */
-  public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, Calendar calendar)
-      throws SQLException, IOException {
-
-    Preconditions.checkNotNull(calendar, "Calendar object can't be null");
-
-    jdbcToArrowVectors(rs, root, new JdbcToArrowConfig(new RootAllocator(0), calendar));
-  }
-
-  static boolean isColumnNullable(ResultSet resultSet, int index) throws SQLException {
-    int nullableValue = resultSet.getMetaData().isNullable(index);
-    return nullableValue == ResultSetMetaData.columnNullable ||
-        nullableValue == ResultSetMetaData.columnNullableUnknown;
-  }
-
-  /**
-   * Iterates over the given JDBC {@link ResultSet} to fetch the data and transpose it into
-   * the given Arrow vectors.
-   *
-   * @param rs     ResultSet to use to fetch the data from underlying database
-   * @param root   Arrow {@link VectorSchemaRoot} object to populate
-   * @param config The configuration to use when reading the data.
-   * @throws SQLException on JDBC access error
-   * @throws IOException on I/O error
-   */
-  public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, JdbcToArrowConfig config)
-      throws SQLException, IOException {
-
-    ResultSetMetaData rsmd = rs.getMetaData();
-    int columnCount = rsmd.getColumnCount();
-
-    JdbcConsumer[] consumers = new JdbcConsumer[columnCount];
-    for (int i = 1; i <= columnCount; i++) {
-      FieldVector vector = root.getVector(rsmd.getColumnName(i));
-      consumers[i - 1] = getConsumer(vector.getField().getType(), i, isColumnNullable(rs, i), vector, config);
-    }
-
-    CompositeJdbcConsumer compositeConsumer = null;
-    // Only clean up resources when an error occurs; the vectors within the
-    // consumers remain useful, and callers are responsible for closing them.
-    try {
-      compositeConsumer = new CompositeJdbcConsumer(consumers);
-      int readRowCount = 0;
-      if (config.getTargetBatchSize() == JdbcToArrowConfig.NO_LIMIT_BATCH_SIZE) {
-        while (rs.next()) {
-          ValueVectorUtility.ensureCapacity(root, readRowCount + 1);
-          compositeConsumer.consume(rs);
-          readRowCount++;
-        }
-      } else {
-        while (rs.next() && readRowCount < config.getTargetBatchSize()) {
-          compositeConsumer.consume(rs);
-          readRowCount++;
-        }
-      }
-
-      root.setRowCount(readRowCount);
-    } catch (Exception e) {
-      // An error occurred; clean up resources before rethrowing.
-      if (compositeConsumer != null) {
-        compositeConsumer.close();
-      }
-      throw e;
-    }
-  }
-
-  static JdbcConsumer getConsumer(ArrowType arrowType, int columnIndex, boolean nullable,
-      FieldVector vector, JdbcToArrowConfig config) {
-    final Calendar calendar = config.getCalendar();
-
-    switch (arrowType.getTypeID()) {
-      case Bool:
-        return BitConsumer.createConsumer((BitVector) vector, columnIndex, nullable);
-      case Int:
-        switch (((ArrowType.Int) arrowType).getBitWidth()) {
-          case 8:
-            return TinyIntConsumer.createConsumer((TinyIntVector) vector, columnIndex, nullable);
-          case 16:
-            return SmallIntConsumer.createConsumer((SmallIntVector) vector, columnIndex, nullable);
-          case 32:
-            return IntConsumer.createConsumer((IntVector) vector, columnIndex, nullable);
-          case 64:
-            return BigIntConsumer.createConsumer((BigIntVector) vector, columnIndex, nullable);
-          default:
-            return null;
-        }
-      case Decimal:
-        return DecimalConsumer.createConsumer((DecimalVector) vector, columnIndex, nullable);
-      case FloatingPoint:
-        switch (((ArrowType.FloatingPoint) arrowType).getPrecision()) {
-          case SINGLE:
-            return FloatConsumer.createConsumer((Float4Vector) vector, columnIndex, nullable);
-          case DOUBLE:
-            return DoubleConsumer.createConsumer((Float8Vector) vector, columnIndex, nullable);
-          default:
-            return null;
-        }
-      case Utf8:
-      case LargeUtf8:
-        return VarCharConsumer.createConsumer((VarCharVector) vector, columnIndex, nullable);
-      case Binary:
-      case LargeBinary:
-        return BinaryConsumer.createConsumer((VarBinaryVector) vector, columnIndex, nullable);
-      case Date:
-        return DateConsumer.createConsumer((DateDayVector) vector, columnIndex, nullable, calendar);
-      case Time:
-        return TimeConsumer.createConsumer((TimeMilliVector) vector, columnIndex, nullable, calendar);
-      case Timestamp:
-        if (config.getCalendar() == null) {
-          return TimestampConsumer.createConsumer((TimeStampMilliVector) vector, columnIndex, nullable);
-        } else {
-          return TimestampTZConsumer.createConsumer((TimeStampMilliTZVector) vector, columnIndex, nullable, calendar);
-        }
-      case List:
-        FieldVector childVector = ((ListVector) vector).getDataVector();
-        JdbcConsumer delegate = getConsumer(childVector.getField().getType(), JDBC_ARRAY_VALUE_COLUMN,
-            childVector.getField().isNullable(), childVector, config);
-        return ArrayConsumer.createConsumer((ListVector) vector, delegate, columnIndex, nullable);
-      case Null:
-        return new NullConsumer((NullVector) vector);
-      default:
-        // should never get here; the Arrow type has no JDBC consumer
-        throw new UnsupportedOperationException();
-    }
-  }
-}
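
An end-to-end sketch of the utilities removed above: derive a Schema from the ResultSet
metadata, allocate a matching VectorSchemaRoot, and populate it. The JDBC URL and query are
placeholders, not part of the adapter.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
    import org.apache.arrow.memory.BufferAllocator;
    import org.apache.arrow.memory.RootAllocator;
    import org.apache.arrow.vector.VectorSchemaRoot;
    import org.apache.arrow.vector.types.pojo.Schema;

    public class JdbcToArrowSketch {
      public static void main(String[] args) throws Exception {
        try (BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
             Connection conn = DriverManager.getConnection("jdbc:h2:mem:example"); // placeholder URL
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT * FROM my_table")) {         // placeholder query
          JdbcToArrowConfig config =
              new JdbcToArrowConfigBuilder(allocator, JdbcToArrowUtils.getUtcCalendar()).build();
          // Build the schema from the JDBC metadata, then consume the rows.
          Schema schema = JdbcToArrowUtils.jdbcToArrowSchema(rs.getMetaData(), config);
          try (VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
            JdbcToArrowUtils.jdbcToArrowVectors(rs, root, config);
            System.out.println(root.contentToTSVString());
          }
        }
      }
    }

Note that jdbcToArrowVectors stops after the config's target batch size, so callers looping over
large results would re-invoke it per batch.
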
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/ArrayConsumer.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/ArrayConsumer.java
deleted file mode 100644
index ed12f81..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/ArrayConsumer.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc.consumer;
-
-import java.io.IOException;
-import java.sql.Array;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.apache.arrow.vector.ValueVector;
-import org.apache.arrow.vector.complex.ListVector;
-
-/**
- * Consumer which consumes array-type values from a {@link ResultSet}.
- * Writes the data to a {@link org.apache.arrow.vector.complex.ListVector}.
- */
-public abstract class ArrayConsumer extends BaseConsumer<ListVector> {
-
-  /**
-   * Creates a consumer for {@link ListVector}.
-   */
-  public static ArrayConsumer createConsumer(
-          ListVector vector, JdbcConsumer delegate, int index, boolean nullable) {
-    if (nullable) {
-      return new ArrayConsumer.NullableArrayConsumer(vector, delegate, index);
-    } else {
-      return new ArrayConsumer.NonNullableArrayConsumer(vector, delegate, index);
-    }
-  }
-
-  protected final JdbcConsumer delegate;
-
-  private final ValueVector innerVector;
-
-  protected int innerVectorIndex = 0;
-
-  /**
-   * Instantiates an ArrayConsumer.
-   */
-  public ArrayConsumer(ListVector vector, JdbcConsumer delegate, int index) {
-    super(vector, index);
-    this.delegate = delegate;
-    this.innerVector = vector.getDataVector();
-  }
-
-  @Override
-  public void close() throws Exception {
-    this.vector.close();
-    this.delegate.close();
-  }
-
-  void ensureInnerVectorCapacity(int targetCapacity) {
-    while (innerVector.getValueCapacity() < targetCapacity) {
-      innerVector.reAlloc();
-    }
-  }
-
-  /**
-   * Nullable consumer for {@link ListVector}.
-   */
-  static class NullableArrayConsumer extends ArrayConsumer {
-
-    /**
-     * Instantiates a nullable array consumer.
-     */
-    public NullableArrayConsumer(ListVector vector, JdbcConsumer delegate, int index) {
-      super(vector, delegate, index);
-    }
-
-    @Override
-    public void consume(ResultSet resultSet) throws SQLException, IOException {
-      final Array array = resultSet.getArray(columnIndexInResultSet);
-      if (!resultSet.wasNull()) {
-        vector.startNewValue(currentIndex);
-        int count = 0;
-        try (ResultSet rs = array.getResultSet()) {
-          while (rs.next()) {
-            ensureInnerVectorCapacity(innerVectorIndex + count + 1);
-            delegate.consume(rs);
-            count++;
-          }
-        }
-        vector.endValue(currentIndex, count);
-        innerVectorIndex += count;
-      }
-      currentIndex++;
-    }
-  }
-
-  /**
-   * Non-nullable consumer for {@link ListVector}.
-   */
-  static class NonNullableArrayConsumer extends ArrayConsumer {
-
-    /**
-     * Instantiates a non-nullable array consumer.
-     */
-    public NonNullableArrayConsumer(ListVector vector, JdbcConsumer delegate, int index) {
-      super(vector, delegate, index);
-    }
-
-    @Override
-    public void consume(ResultSet resultSet) throws SQLException, IOException {
-      final Array array = resultSet.getArray(columnIndexInResultSet);
-      vector.startNewValue(currentIndex);
-      int count = 0;
-      try (ResultSet rs = array.getResultSet()) {
-        while (rs.next()) {
-          ensureInnerVectorCapacity(innerVectorIndex + count + 1);
-          delegate.consume(rs);
-          count++;
-        }
-      }
-      vector.endValue(currentIndex, count);
-      innerVectorIndex += count;
-      currentIndex++;
-    }
-  }
-}
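
As the jdbcToArrowSchema Javadoc earlier in this commit notes, ARRAY columns need a sub-type
mapping in the configuration or schema conversion fails. A sketch, assuming the
JdbcFieldInfo(int jdbcType) constructor this adapter provided (that constructor is not shown in
this diff):

    import java.sql.Types;
    import java.util.Calendar;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.arrow.adapter.jdbc.JdbcFieldInfo;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
    import org.apache.arrow.memory.BufferAllocator;
    import org.apache.arrow.memory.RootAllocator;

    public class ArraySubTypeSketch {
      static JdbcToArrowConfig configForIntArrayColumn() {
        BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
        Calendar calendar = JdbcToArrowUtils.getUtcCalendar();
        // Column 3 (1-based, matching JDBC) is assumed to hold an INTEGER array.
        Map<Integer, JdbcFieldInfo> subTypes = new HashMap<>();
        subTypes.put(3, new JdbcFieldInfo(Types.INTEGER)); // assumed constructor
        return new JdbcToArrowConfigBuilder(allocator, calendar)
            .setArraySubTypeByColumnIndexMap(subTypes)
            .build();
      }
    }
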
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BaseConsumer.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BaseConsumer.java
deleted file mode 100644
index 2db128d..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BaseConsumer.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc.consumer;
-
-import org.apache.arrow.vector.ValueVector;
-
-/**
- * Base class for all consumers.
- * @param <V> vector type.
- */
-public abstract class BaseConsumer<V extends ValueVector> implements JdbcConsumer<V> {
-
-  protected V vector;
-
-  protected final int columnIndexInResultSet;
-
-  protected int currentIndex;
-
-  /**
-   * Constructs a new consumer.
-   * @param vector the underlying vector for the consumer.
-   * @param index the column id for the consumer.
-   */
-  public BaseConsumer(V vector, int index) {
-    this.vector = vector;
-    this.columnIndexInResultSet = index;
-  }
-
-  @Override
-  public void close() throws Exception {
-    this.vector.close();
-  }
-
-  @Override
-  public void resetValueVector(V vector) {
-    this.vector = vector;
-    this.currentIndex = 0;
-  }
-}
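
BaseConsumer is what each type-specific consumer extends; only consume(ResultSet) is left to the
subclass. A minimal sketch of the pattern, mirroring the nullable fixed-width consumers in this
commit; the class name is illustrative.

    import java.sql.ResultSet;
    import java.sql.SQLException;

    import org.apache.arrow.adapter.jdbc.consumer.BaseConsumer;
    import org.apache.arrow.vector.IntVector;

    public class ExampleIntConsumer extends BaseConsumer<IntVector> {

      public ExampleIntConsumer(IntVector vector, int index) {
        super(vector, index);
      }

      @Override
      public void consume(ResultSet resultSet) throws SQLException {
        int value = resultSet.getInt(columnIndexInResultSet);
        if (!resultSet.wasNull()) {
          // Fixed-width vectors are allocated up front, so set() suffices.
          vector.set(currentIndex, value);
        }
        currentIndex++;
      }
    }
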
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BigIntConsumer.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BigIntConsumer.java
deleted file mode 100644
index 19c8efa..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BigIntConsumer.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc.consumer;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.apache.arrow.vector.BigIntVector;
-
-/**
- * Consumer which consumes bigint type values from a {@link ResultSet}.
- * Writes the data to a {@link org.apache.arrow.vector.BigIntVector}.
- */
-public class BigIntConsumer {
-
-  /**
-   * Creates a consumer for {@link BigIntVector}.
-   */
-  public static JdbcConsumer<BigIntVector> createConsumer(BigIntVector vector, int index, boolean nullable) {
-    if (nullable) {
-      return new NullableBigIntConsumer(vector, index);
-    } else {
-      return new NonNullableBigIntConsumer(vector, index);
-    }
-  }
-
-  /**
-   * Nullable consumer for big int.
-   */
-  static class NullableBigIntConsumer extends BaseConsumer<BigIntVector> {
-
-    /**
-     * Instantiates a nullable BigIntConsumer.
-     */
-    public NullableBigIntConsumer(BigIntVector vector, int index) {
-      super(vector, index);
-    }
-
-    @Override
-    public void consume(ResultSet resultSet) throws SQLException {
-      long value = resultSet.getLong(columnIndexInResultSet);
-      if (!resultSet.wasNull()) {
-        // for fixed width vectors, we have allocated enough memory proactively,
-        // so there is no need to call the setSafe method here.
-        vector.set(currentIndex, value);
-      }
-      currentIndex++;
-    }
-  }
-
-  /**
-   * Non-nullable consumer for big int.
-   */
-  static class NonNullableBigIntConsumer extends BaseConsumer<BigIntVector> {
-
-    /**
-     * Instantiates a non-nullable BigIntConsumer.
-     */
-    public NonNullableBigIntConsumer(BigIntVector vector, int index) {
-      super(vector, index);
-    }
-
-    @Override
-    public void consume(ResultSet resultSet) throws SQLException {
-      long value = resultSet.getLong(columnIndexInResultSet);
-      // for fixed width vectors, we have allocated enough memory proactively,
-      // so there is no need to call the setSafe method here.
-      vector.set(currentIndex, value);
-      currentIndex++;
-    }
-  }
-}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BinaryConsumer.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BinaryConsumer.java
deleted file mode 100644
index 8c5f611..0000000
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/consumer/BinaryConsumer.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.arrow.adapter.jdbc.consumer;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.apache.arrow.memory.ArrowBuf;
-import org.apache.arrow.vector.BitVectorHelper;
-import org.apache.arrow.vector.VarBinaryVector;
-
-/**
- * Consumer which consumes binary type values from a {@link ResultSet}.
- * Writes the data to a {@link org.apache.arrow.vector.VarBinaryVector}.
- */
-public abstract class BinaryConsumer extends BaseConsumer<VarBinaryVector> {
-
-  /**
-   * Creates a consumer for {@link VarBinaryVector}.
-   */
-  public static BinaryConsumer createConsumer(VarBinaryVector vector, int index, boolean nullable) {
-    if (nullable) {
-      return new NullableBinaryConsumer(vector, index);
-    } else {
-      return new NonNullableBinaryConsumer(vector, index);
-    }
-  }
-
-  private final byte[] reuseBytes = new byte[1024];
-
-  /**
-   * Instantiates a BinaryConsumer.
-   */
-  public BinaryConsumer(VarBinaryVector vector, int index) {
-    super(vector, index);
-    if (vector != null) {
-      vector.allocateNewSafe();
-    }
-  }
-
-  /**
-   * Consumes an InputStream.
-   */
-  public void consume(InputStream is) throws IOException {
-    if (is != null) {
-      while (currentIndex >= vector.getValueCapacity()) {
-        vector.reallocValidityAndOffsetBuffers();
-      }
-      final int startOffset = vector.getStartOffset(currentIndex);
-      final ArrowBuf offsetBuffer = vector.getOffsetBuffer();
-      int dataLength = 0;
-      int read;
-      while ((read = is.read(reuseBytes)) != -1) {
-        while (vector.getDataBuffer().capacity() < (startOffset + dataLength + read)) {
-          vector.reallocDataBuffer();
-        }
-        vector.getDataBuffer().setBytes(startOffset + dataLength, reuseBytes, 0, read);
-        dataLength += read;
-      }
-      offsetBuffer.setInt((currentIndex + 1) * VarBinaryVector.OFFSET_WIDTH, startOffset + dataLength);
-      BitVectorHelper.setBit(vector.getValidityBuffer(), currentIndex);
-      vector.setLastSet(currentIndex);
-    }
-  }
-
-  public void moveWriterPosition() {
-    currentIndex++;
-  }
-
-  @Override
-  public void resetValueVector(VarBinaryVector vector) {
-    this.vector = vector;
-    this.vector.allocateNewSafe();
-    this.currentIndex = 0;
-  }
-
-  /**
-   * Consumer for nullable binary data.
-   */
-  static class NullableBinaryConsumer extends BinaryConsumer {
-    
-    /**
-     * Instantiates a nullable BinaryConsumer.
-     */
-    public NullableBinaryConsumer(VarBinaryVector vector, int index) {
-      super(vector, index);
-    }
-
-    @Override
-    public void consume(ResultSet resultSet) throws SQLException, IOException {
-      InputStream is = resultSet.getBinaryStream(columnIndexInResultSet);
-      if (!resultSet.wasNull()) {
-        consume(is);
-      }
... 148455 lines suppressed ...

[arrow-rs] 02/14: Removed R language.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit d131362e1b4ada7bf777c44a818a4bef140161a2
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:19:30 2021 +0000

    Removed R language.
---
 r/.Rbuildignore                                    |   26 -
 r/.gitignore                                       |   20 -
 r/DESCRIPTION                                      |  102 -
 r/Makefile                                         |   53 -
 r/NAMESPACE                                        |  346 -
 r/NEWS.md                                          |  358 -
 r/R/array-data.R                                   |   53 -
 r/R/array.R                                        |  301 -
 r/R/arrow-datum.R                                  |  165 -
 r/R/arrow-package.R                                |  278 -
 r/R/arrow-tabular.R                                |  220 -
 r/R/arrowExports.R                                 | 1668 -----
 r/R/buffer.R                                       |   72 -
 r/R/chunked-array.R                                |  132 -
 r/R/compression.R                                  |  121 -
 r/R/compute.R                                      |  257 -
 r/R/config.R                                       |   30 -
 r/R/csv.R                                          |  587 --
 r/R/dataset-factory.R                              |  169 -
 r/R/dataset-format.R                               |  320 -
 r/R/dataset-partition.R                            |  125 -
 r/R/dataset-scan.R                                 |  202 -
 r/R/dataset-write.R                                |   99 -
 r/R/dataset.R                                      |  320 -
 r/R/deprecated.R                                   |   40 -
 r/R/dictionary.R                                   |   69 -
 r/R/dplyr.R                                        | 1101 ---
 r/R/enums.R                                        |  142 -
 r/R/expression.R                                   |  346 -
 r/R/feather.R                                      |  221 -
 r/R/field.R                                        |   82 -
 r/R/filesystem.R                                   |  510 --
 r/R/flight.R                                       |  121 -
 r/R/install-arrow.R                                |  139 -
 r/R/io.R                                           |  290 -
 r/R/ipc_stream.R                                   |  113 -
 r/R/json.R                                         |  104 -
 r/R/memory-pool.R                                  |   61 -
 r/R/message.R                                      |   95 -
 r/R/metadata.R                                     |  132 -
 r/R/parquet.R                                      |  575 --
 r/R/python.R                                       |  159 -
 r/R/record-batch-reader.R                          |  155 -
 r/R/record-batch-writer.R                          |  196 -
 r/R/record-batch.R                                 |  197 -
 r/R/reexports-bit64.R                              |   22 -
 r/R/reexports-tidyselect.R                         |   43 -
 r/R/scalar.R                                       |   79 -
 r/R/schema.R                                       |  302 -
 r/R/table.R                                        |  179 -
 r/R/type.R                                         |  484 --
 r/R/util.R                                         |  112 -
 r/README.md                                        |  306 -
 r/_pkgdown.yml                                     |  177 -
 r/arrow.Rproj                                      |   21 -
 r/cleanup                                          |   21 -
 r/configure                                        |  277 -
 r/configure.win                                    |   73 -
 r/cran-comments.md                                 |   10 -
 r/data-raw/codegen.R                               |  254 -
 r/extra-tests/helpers.R                            |   36 -
 r/extra-tests/test-read-files.R                    |  197 -
 r/extra-tests/write-files.R                        |   42 -
 r/inst/NOTICE.txt                                  |   84 -
 r/inst/build_arrow_static.sh                       |   79 -
 r/inst/demo_flight_server.py                       |  120 -
 r/inst/v0.7.1.parquet                              |  Bin 4372 -> 0 bytes
 r/lint.sh                                          |   41 -
 r/man/ArrayData.Rd                                 |   27 -
 r/man/ChunkedArray.Rd                              |   58 -
 r/man/Codec.Rd                                     |   24 -
 r/man/CsvReadOptions.Rd                            |  100 -
 r/man/CsvTableReader.Rd                            |   32 -
 r/man/DataType.Rd                                  |   15 -
 r/man/Dataset.Rd                                   |   81 -
 r/man/DictionaryType.Rd                            |   15 -
 r/man/Expression.Rd                                |   18 -
 r/man/FeatherReader.Rd                             |   33 -
 r/man/Field.Rd                                     |   35 -
 r/man/FileFormat.Rd                                |   56 -
 r/man/FileInfo.Rd                                  |   28 -
 r/man/FileSelector.Rd                              |   27 -
 r/man/FileSystem.Rd                                |   99 -
 r/man/FileWriteOptions.Rd                          |    8 -
 r/man/FixedWidthType.Rd                            |   15 -
 r/man/FragmentScanOptions.Rd                       |   29 -
 r/man/InputStream.Rd                               |   45 -
 r/man/MemoryPool.Rd                                |   24 -
 r/man/Message.Rd                                   |   15 -
 r/man/MessageReader.Rd                             |   15 -
 r/man/OutputStream.Rd                              |   38 -
 r/man/ParquetArrowReaderProperties.Rd              |   29 -
 r/man/ParquetFileReader.Rd                         |   58 -
 r/man/ParquetFileWriter.Rd                         |   31 -
 r/man/ParquetWriterProperties.Rd                   |   49 -
 r/man/Partitioning.Rd                              |   51 -
 r/man/RecordBatch.Rd                               |   92 -
 r/man/RecordBatchReader.Rd                         |   86 -
 r/man/RecordBatchWriter.Rd                         |   89 -
 r/man/Scalar.Rd                                    |    9 -
 r/man/Scanner.Rd                                   |   47 -
 r/man/Schema.Rd                                    |   85 -
 r/man/Table.Rd                                     |   92 -
 r/man/array.Rd                                     |   84 -
 r/man/arrow-package.Rd                             |   41 -
 r/man/arrow_available.Rd                           |   42 -
 r/man/arrow_info.Rd                                |   17 -
 r/man/buffer.Rd                                    |   35 -
 r/man/call_function.Rd                             |   46 -
 r/man/cast_options.Rd                              |   22 -
 r/man/codec_is_available.Rd                        |   20 -
 r/man/compression.Rd                               |   31 -
 r/man/copy_files.Rd                                |   35 -
 r/man/cpu_count.Rd                                 |   17 -
 r/man/data-type.Rd                                 |  163 -
 r/man/dataset_factory.Rd                           |   76 -
 r/man/default_memory_pool.Rd                       |   15 -
 r/man/dictionary.Rd                                |   24 -
 r/man/enums.Rd                                     |   73 -
 r/man/flight_connect.Rd                            |   21 -
 r/man/flight_get.Rd                                |   19 -
 r/man/flight_put.Rd                                |   25 -
 r/man/hive_partition.Rd                            |   32 -
 r/man/install_arrow.Rd                             |   61 -
 r/man/install_pyarrow.Rd                           |   22 -
 r/man/list_compute_functions.Rd                    |   39 -
 r/man/list_flights.Rd                              |   23 -
 r/man/load_flight_server.Rd                        |   17 -
 r/man/make_readable_file.Rd                        |   29 -
 r/man/map_batches.Rd                               |   30 -
 r/man/match_arrow.Rd                               |   28 -
 r/man/mmap_create.Rd                               |   19 -
 r/man/mmap_open.Rd                                 |   16 -
 r/man/open_dataset.Rd                              |   79 -
 r/man/read_delim_arrow.Rd                          |  218 -
 r/man/read_feather.Rd                              |   50 -
 r/man/read_ipc_stream.Rd                           |   42 -
 r/man/read_json_arrow.Rd                           |   52 -
 r/man/read_message.Rd                              |   14 -
 r/man/read_parquet.Rd                              |   50 -
 r/man/read_schema.Rd                               |   19 -
 r/man/reexports.Rd                                 |   28 -
 r/man/s3_bucket.Rd                                 |   28 -
 r/man/type.Rd                                      |   17 -
 r/man/unify_schemas.Rd                             |   26 -
 r/man/value_counts.Rd                              |   18 -
 r/man/write_dataset.Rd                             |   66 -
 r/man/write_feather.Rd                             |   61 -
 r/man/write_ipc_stream.Rd                          |   38 -
 r/man/write_parquet.Rd                             |  108 -
 r/man/write_to_raw.Rd                              |   22 -
 r/pkgdown/extra.js                                 |   65 -
 r/src/.clang-format                                |   20 -
 r/src/.gitignore                                   |    3 -
 r/src/Makevars.in                                  |   29 -
 r/src/array.cpp                                    |  286 -
 r/src/array_to_vector.cpp                          | 1330 ----
 r/src/arraydata.cpp                                |   49 -
 r/src/arrowExports.cpp                             | 7032 --------------------
 r/src/arrow_cpp11.h                                |  377 --
 r/src/arrow_types.h                                |  237 -
 r/src/arrow_vctrs.h                                |   22 -
 r/src/buffer.cpp                                   |   71 -
 r/src/chunkedarray.cpp                             |  139 -
 r/src/compression.cpp                              |   56 -
 r/src/compute.cpp                                  |  301 -
 r/src/csv.cpp                                      |  177 -
 r/src/dataset.cpp                                  |  486 --
 r/src/datatype.cpp                                 |  426 --
 r/src/expression.cpp                               |   71 -
 r/src/feather.cpp                                  |   87 -
 r/src/field.cpp                                    |   56 -
 r/src/filesystem.cpp                               |  329 -
 r/src/imports.cpp                                  |   43 -
 r/src/io.cpp                                       |  181 -
 r/src/json.cpp                                     |   67 -
 r/src/memorypool.cpp                               |   92 -
 r/src/message.cpp                                  |  105 -
 r/src/nameof.h                                     |   93 -
 r/src/parquet.cpp                                  |  326 -
 r/src/py-to-r.cpp                                  |   81 -
 r/src/r_to_arrow.cpp                               | 1046 ---
 r/src/recordbatch.cpp                              |  326 -
 r/src/recordbatchreader.cpp                        |  130 -
 r/src/recordbatchwriter.cpp                        |   67 -
 r/src/runtimeinfo.cpp                              |   30 -
 r/src/scalar.cpp                                   |   97 -
 r/src/schema.cpp                                   |  146 -
 r/src/symbols.cpp                                  |   86 -
 r/src/table.cpp                                    |  350 -
 r/src/threadpool.cpp                               |   51 -
 r/src/type_infer.cpp                               |  202 -
 r/tests/testthat.R                                 |   27 -
 .../data-arrow-extra-meta_3.0.0.parquet            |  Bin 7862 -> 0 bytes
 .../golden-files/data-arrow_0.17.0_lz4.feather     |  Bin 1650 -> 0 bytes
 .../data-arrow_0.17.0_uncompressed.feather         |  Bin 1354 -> 0 bytes
 .../golden-files/data-arrow_0.17.0_zstd.feather    |  Bin 1626 -> 0 bytes
 .../testthat/golden-files/data-arrow_1.0.1.parquet |  Bin 3603 -> 0 bytes
 .../golden-files/data-arrow_1.0.1_lz4.feather      |  Bin 2858 -> 0 bytes
 .../data-arrow_1.0.1_uncompressed.feather          |  Bin 2626 -> 0 bytes
 .../golden-files/data-arrow_1.0.1_zstd.feather     |  Bin 2842 -> 0 bytes
 .../testthat/golden-files/data-arrow_2.0.0.parquet |  Bin 3965 -> 0 bytes
 .../golden-files/data-arrow_2.0.0_lz4.feather      |  Bin 3162 -> 0 bytes
 .../data-arrow_2.0.0_uncompressed.feather          |  Bin 2930 -> 0 bytes
 .../golden-files/data-arrow_2.0.0_zstd.feather     |  Bin 3146 -> 0 bytes
 r/tests/testthat/helper-arrow.R                    |   69 -
 r/tests/testthat/helper-data.R                     |  169 -
 r/tests/testthat/helper-expectation.R              |  191 -
 r/tests/testthat/helper-parquet.R                  |   29 -
 r/tests/testthat/helper-roundtrip.R                |   43 -
 r/tests/testthat/helper-skip.R                     |   55 -
 r/tests/testthat/latin1.R                          |   76 -
 r/tests/testthat/test-Array.R                      |  820 ---
 r/tests/testthat/test-RecordBatch.R                |  501 --
 r/tests/testthat/test-Table.R                      |  477 --
 r/tests/testthat/test-array-data.R                 |   35 -
 r/tests/testthat/test-arrow-info.R                 |   23 -
 r/tests/testthat/test-arrow.R                      |   74 -
 r/tests/testthat/test-backwards-compatibility.R    |  117 -
 r/tests/testthat/test-buffer-reader.R              |   40 -
 r/tests/testthat/test-buffer.R                     |   99 -
 r/tests/testthat/test-chunked-array.R              |  413 --
 r/tests/testthat/test-chunked-array.txt            |  103 -
 r/tests/testthat/test-compressed.R                 |   75 -
 r/tests/testthat/test-compute-aggregate.R          |  353 -
 r/tests/testthat/test-compute-arith.R              |  116 -
 r/tests/testthat/test-compute-sort.R               |  165 -
 r/tests/testthat/test-compute-vector.R             |  133 -
 r/tests/testthat/test-csv.R                        |  258 -
 r/tests/testthat/test-data-type.R                  |  413 --
 r/tests/testthat/test-dataset.R                    | 1786 -----
 r/tests/testthat/test-dplyr-arrange.R              |  211 -
 r/tests/testthat/test-dplyr-filter.R               |  417 --
 r/tests/testthat/test-dplyr-group-by.R             |  135 -
 r/tests/testthat/test-dplyr-mutate.R               |  417 --
 r/tests/testthat/test-dplyr-string-functions.R     |  347 -
 r/tests/testthat/test-dplyr.R                      |  591 --
 r/tests/testthat/test-expression.R                 |   99 -
 r/tests/testthat/test-feather.R                    |  242 -
 r/tests/testthat/test-field.R                      |   38 -
 r/tests/testthat/test-filesystem.R                 |  175 -
 r/tests/testthat/test-install-arrow.R              |   39 -
 r/tests/testthat/test-json.R                       |  250 -
 r/tests/testthat/test-memory-pool.R                |   26 -
 r/tests/testthat/test-message-reader.R             |   86 -
 r/tests/testthat/test-message.R                    |   64 -
 r/tests/testthat/test-metadata.R                   |  207 -
 r/tests/testthat/test-parquet.R                    |  254 -
 r/tests/testthat/test-python-flight.R              |   63 -
 r/tests/testthat/test-python.R                     |  102 -
 r/tests/testthat/test-read-record-batch.R          |   79 -
 r/tests/testthat/test-read-write.R                 |  126 -
 r/tests/testthat/test-record-batch-reader.R        |  142 -
 r/tests/testthat/test-s3-minio.R                   |  229 -
 r/tests/testthat/test-s3.R                         |   53 -
 r/tests/testthat/test-scalar.R                     |  104 -
 r/tests/testthat/test-schema.R                     |  176 -
 r/tests/testthat/test-thread-pool.R                |   26 -
 r/tests/testthat/test-type.R                       |  108 -
 r/tests/testthat/test-utf.R                        |   25 -
 r/tools/autobrew                                   |   66 -
 r/tools/nixlibs.R                                  |  506 --
 r/tools/ubsan.supp                                 |   18 -
 r/tools/winlibs.R                                  |   65 -
 r/vignettes/arrow.Rmd                              |  206 -
 r/vignettes/dataset.Rmd                            |  397 --
 r/vignettes/developing.Rmd                         |  520 --
 r/vignettes/flight.Rmd                             |   78 -
 r/vignettes/fs.Rmd                                 |  130 -
 r/vignettes/install.Rmd                            |  366 -
 r/vignettes/python.Rmd                             |  131 -
 271 files changed, 46586 deletions(-)

diff --git a/r/.Rbuildignore b/r/.Rbuildignore
deleted file mode 100644
index cf4b7ce..0000000
--- a/r/.Rbuildignore
+++ /dev/null
@@ -1,26 +0,0 @@
-^.*\.Rproj$
-^\.Rproj\.user$
-^README\.Rmd$
-src/.clang-format
-LICENSE.md
-^data-raw$
-lint.sh
-Dockerfile
-.*\.tar\.gz
-^windows
-^libarrow
-^revdep
-clang_format.sh
-^cran-comments\.md$
-^arrow_.*.tar.gz$
-^arrow_.*.tgz$
-^_pkgdown\.yml$
-^docs$
-^pkgdown$
-^Makefile$
-^.*\.orig$
-^.*\.cmd$
-^autobrew$
-^apache-arrow.rb$
-^.*\.Rhistory$
-^extra-tests
diff --git a/r/.gitignore b/r/.gitignore
deleted file mode 100644
index 76e8a8d..0000000
--- a/r/.gitignore
+++ /dev/null
@@ -1,20 +0,0 @@
-Meta
-docs/
-inst/doc
-*.o
-*.o-*
-*.d
-*.so
-*.dll
-.RData
-.Rproj.user
-.Rhistory
-src/Makevars
-src/Makevars.win
-windows/
-libarrow/
-revdep/
-vignettes/nyc-taxi/
-arrow_*.tar.gz
-arrow_*.tgz
-extra-tests/files
diff --git a/r/DESCRIPTION b/r/DESCRIPTION
deleted file mode 100644
index 7a63b9e..0000000
--- a/r/DESCRIPTION
+++ /dev/null
@@ -1,102 +0,0 @@
-Package: arrow
-Title: Integration to 'Apache' 'Arrow'
-Version: 3.0.0.9000
-Authors@R: c(
-    person("Neal", "Richardson", email = "neal@ursalabs.org", role = c("aut", "cre")),
-    person("Ian", "Cook", email = "ianmcook@gmail.com", role = c("aut")),
-    person("Jonathan", "Keane", email = "jkeane@gmail.com", role = c("aut")),
-    person("Romain", "Fran\u00e7ois", email = "romain@rstudio.com", role = c("aut"), comment = c(ORCID = "0000-0002-2444-4226")),
-    person("Jeroen", "Ooms", email = "jeroen@berkeley.edu", role = c("aut")),
-    person("Javier", "Luraschi", email = "javier@rstudio.com", role = c("ctb")),
-    person("Jeffrey", "Wong", email = "jeffreyw@netflix.com", role = c("ctb")),
-    person("Apache Arrow", email = "dev@arrow.apache.org", role = c("aut", "cph"))
-  )
-Description: 'Apache' 'Arrow' <https://arrow.apache.org/> is a cross-language
-    development platform for in-memory data. It specifies a standardized
-    language-independent columnar memory format for flat and hierarchical data,
-    organized for efficient analytic operations on modern hardware. This
-    package provides an interface to the 'Arrow C++' library.
-Depends: R (>= 3.3)
-License: Apache License (>= 2.0)
-URL: https://github.com/apache/arrow/, https://arrow.apache.org/docs/r/
-BugReports: https://issues.apache.org/jira/projects/ARROW/issues
-Encoding: UTF-8
-Language: en-US
-SystemRequirements: C++11; for AWS S3 support on Linux, libcurl and openssl (optional)
-Biarch: true
-Imports:
-    assertthat,
-    bit64 (>= 0.9-7),
-    methods,
-    purrr,
-    R6,
-    rlang,
-    stats,
-    tidyselect,
-    utils,
-    vctrs
-Roxygen: list(markdown = TRUE, r6 = FALSE, load = "source")
-RoxygenNote: 7.1.1
-VignetteBuilder: knitr
-Suggests:
-    decor,
-    distro,
-    dplyr,
-    hms,
-    knitr,
-    lubridate,
-    pkgload,
-    reticulate,
-    rmarkdown,
-    stringr,
-    testthat,
-    tibble,
-    withr
-LinkingTo: cpp11 (>= 0.2.0)
-Collate:
-    'enums.R'
-    'arrow-package.R'
-    'type.R'
-    'array-data.R'
-    'arrow-datum.R'
-    'array.R'
-    'arrow-tabular.R'
-    'arrowExports.R'
-    'buffer.R'
-    'chunked-array.R'
-    'io.R'
-    'compression.R'
-    'scalar.R'
-    'compute.R'
-    'config.R'
-    'csv.R'
-    'dataset.R'
-    'dataset-factory.R'
-    'dataset-format.R'
-    'dataset-partition.R'
-    'dataset-scan.R'
-    'dataset-write.R'
-    'deprecated.R'
-    'dictionary.R'
-    'record-batch.R'
-    'table.R'
-    'expression.R'
-    'dplyr.R'
-    'feather.R'
-    'field.R'
-    'filesystem.R'
-    'flight.R'
-    'install-arrow.R'
-    'ipc_stream.R'
-    'json.R'
-    'memory-pool.R'
-    'message.R'
-    'metadata.R'
-    'parquet.R'
-    'python.R'
-    'record-batch-reader.R'
-    'record-batch-writer.R'
-    'reexports-bit64.R'
-    'reexports-tidyselect.R'
-    'schema.R'
-    'util.R'
diff --git a/r/Makefile b/r/Makefile
deleted file mode 100644
index efc55ab..0000000
--- a/r/Makefile
+++ /dev/null
@@ -1,53 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-VERSION=$(shell grep ^Version DESCRIPTION | sed s/Version:\ //)
-ARROW_R_DEV="TRUE"
-ARROW_LARGE_MEMORY_TESTS=$(ARROW_R_DEV)
-
-doc:
-	R -s -e 'roxygen2::roxygenize()'
-	-git add --all man/*.Rd
-
-test:
-	export ARROW_R_DEV=$(ARROW_R_DEV) && R CMD INSTALL --install-tests --no-test-load --no-docs --no-help --no-byte-compile .
-	export NOT_CRAN=true && export ARROW_R_DEV=$(ARROW_R_DEV) && export AWS_EC2_METADATA_DISABLED=TRUE && export ARROW_LARGE_MEMORY_TESTS=$(ARROW_LARGE_MEMORY_TESTS) && R -s -e 'library(testthat); setwd(file.path(.libPaths()[1], "arrow", "tests")); system.time(test_check("arrow", filter="${file}", reporter=ifelse(nchar("${r}"), "${r}", "summary")))'
-
-deps:
-	R -s -e 'lib <- Sys.getenv("R_LIB", .libPaths()[1]); install.packages("devtools", repo="https://cloud.r-project.org", lib=lib); devtools::install_dev_deps(lib=lib)'
-
-build: doc
-	cp ../NOTICE.txt inst/NOTICE.txt
-	R CMD build .
-
-check: build
-	-export _R_CHECK_CRAN_INCOMING_REMOTE_=FALSE && export ARROW_R_DEV=$(ARROW_R_DEV) && export _R_CHECK_TESTS_NLINES_=0 && R CMD check --as-cran --run-donttest arrow_$(VERSION).tar.gz
-	rm -rf arrow.Rcheck/
-
-release: build
-	-export _R_CHECK_TESTS_NLINES_=0 && R CMD check --as-cran --run-donttest arrow_$(VERSION).tar.gz
-	rm -rf arrow.Rcheck/
-
-clean:
-	-rm src/*.o
-	-rm src/*.so
-	-rm src/*.dll
-	-rm src/Makevars
-	-rm src/Makevars.win
-	-rm -rf arrow.Rcheck/
-	-rm -rf libarrow/
-	-find . -name "*.orig" -delete
diff --git a/r/NAMESPACE b/r/NAMESPACE
deleted file mode 100644
index 5673538..0000000
--- a/r/NAMESPACE
+++ /dev/null
@@ -1,346 +0,0 @@
-# Generated by roxygen2: do not edit by hand
-
-S3method("!=",ArrowObject)
-S3method("$",ArrowTabular)
-S3method("$",Schema)
-S3method("$",StructArray)
-S3method("$",SubTreeFileSystem)
-S3method("$<-",ArrowTabular)
-S3method("$<-",Schema)
-S3method("==",ArrowObject)
-S3method("[",ArrowDatum)
-S3method("[",ArrowTabular)
-S3method("[",Dataset)
-S3method("[",Schema)
-S3method("[",arrow_dplyr_query)
-S3method("[[",ArrowTabular)
-S3method("[[",Schema)
-S3method("[[",StructArray)
-S3method("[[<-",ArrowTabular)
-S3method("[[<-",Schema)
-S3method("names<-",ArrowTabular)
-S3method(Ops,ArrowDatum)
-S3method(Ops,Expression)
-S3method(Ops,array_expression)
-S3method(all.equal,ArrowObject)
-S3method(as.character,ArrowDatum)
-S3method(as.character,FileFormat)
-S3method(as.character,FragmentScanOptions)
-S3method(as.data.frame,ArrowTabular)
-S3method(as.data.frame,StructArray)
-S3method(as.data.frame,arrow_dplyr_query)
-S3method(as.double,ArrowDatum)
-S3method(as.integer,ArrowDatum)
-S3method(as.list,ArrowTabular)
-S3method(as.list,Schema)
-S3method(as.raw,Buffer)
-S3method(as.vector,ArrowDatum)
-S3method(as.vector,array_expression)
-S3method(c,Dataset)
-S3method(dim,ArrowTabular)
-S3method(dim,Dataset)
-S3method(dim,StructArray)
-S3method(dim,arrow_dplyr_query)
-S3method(dimnames,ArrowTabular)
-S3method(head,ArrowDatum)
-S3method(head,ArrowTabular)
-S3method(head,Dataset)
-S3method(head,arrow_dplyr_query)
-S3method(is.na,ArrowDatum)
-S3method(is.na,Expression)
-S3method(is.na,Scalar)
-S3method(is.na,array_expression)
-S3method(is.nan,ArrowDatum)
-S3method(is_in,ArrowDatum)
-S3method(is_in,default)
-S3method(length,ArrowDatum)
-S3method(length,Scalar)
-S3method(length,Schema)
-S3method(match_arrow,ArrowDatum)
-S3method(match_arrow,default)
-S3method(max,ArrowDatum)
-S3method(mean,ArrowDatum)
-S3method(median,ArrowDatum)
-S3method(min,ArrowDatum)
-S3method(names,Dataset)
-S3method(names,FeatherReader)
-S3method(names,RecordBatch)
-S3method(names,Scanner)
-S3method(names,ScannerBuilder)
-S3method(names,Schema)
-S3method(names,StructArray)
-S3method(names,Table)
-S3method(names,arrow_dplyr_query)
-S3method(print,"arrow-enum")
-S3method(print,array_expression)
-S3method(print,arrow_dplyr_query)
-S3method(print,arrow_info)
-S3method(print,arrow_r_metadata)
-S3method(quantile,ArrowDatum)
-S3method(read_message,InputStream)
-S3method(read_message,MessageReader)
-S3method(read_message,default)
-S3method(row.names,ArrowTabular)
-S3method(sort,ArrowDatum)
-S3method(sort,Scalar)
-S3method(sum,ArrowDatum)
-S3method(tail,ArrowDatum)
-S3method(tail,ArrowTabular)
-S3method(tail,Dataset)
-S3method(tail,arrow_dplyr_query)
-S3method(type,ArrowDatum)
-S3method(type,default)
-S3method(unique,ArrowDatum)
-S3method(vec_ptype_abbr,arrow_fixed_size_binary)
-S3method(vec_ptype_abbr,arrow_fixed_size_list)
-S3method(vec_ptype_abbr,arrow_large_list)
-S3method(vec_ptype_abbr,arrow_list)
-S3method(vec_ptype_full,arrow_fixed_size_binary)
-S3method(vec_ptype_full,arrow_fixed_size_list)
-S3method(vec_ptype_full,arrow_large_list)
-S3method(vec_ptype_full,arrow_list)
-export(Array)
-export(Buffer)
-export(BufferOutputStream)
-export(BufferReader)
-export(ChunkedArray)
-export(Codec)
-export(CompressedInputStream)
-export(CompressedOutputStream)
-export(CompressionType)
-export(CsvConvertOptions)
-export(CsvFileFormat)
-export(CsvFragmentScanOptions)
-export(CsvParseOptions)
-export(CsvReadOptions)
-export(CsvTableReader)
-export(Dataset)
-export(DatasetFactory)
-export(DateUnit)
-export(DictionaryArray)
-export(DirectoryPartitioning)
-export(DirectoryPartitioningFactory)
-export(Expression)
-export(FeatherReader)
-export(Field)
-export(FileFormat)
-export(FileInfo)
-export(FileMode)
-export(FileOutputStream)
-export(FileSelector)
-export(FileSystem)
-export(FileSystemDataset)
-export(FileSystemDatasetFactory)
-export(FileType)
-export(FixedSizeListArray)
-export(FixedSizeListType)
-export(FragmentScanOptions)
-export(HivePartitioning)
-export(HivePartitioningFactory)
-export(InMemoryDataset)
-export(IpcFileFormat)
-export(JsonParseOptions)
-export(JsonReadOptions)
-export(JsonTableReader)
-export(LargeListArray)
-export(ListArray)
-export(LocalFileSystem)
-export(MemoryMappedFile)
-export(MessageReader)
-export(MessageType)
-export(MetadataVersion)
-export(NullEncodingBehavior)
-export(ParquetArrowReaderProperties)
-export(ParquetFileFormat)
-export(ParquetFileReader)
-export(ParquetFileWriter)
-export(ParquetVersionType)
-export(ParquetWriterProperties)
-export(Partitioning)
-export(QuantileInterpolation)
-export(RandomAccessFile)
-export(ReadableFile)
-export(RecordBatch)
-export(RecordBatchFileReader)
-export(RecordBatchFileWriter)
-export(RecordBatchStreamReader)
-export(RecordBatchStreamWriter)
-export(S3FileSystem)
-export(Scalar)
-export(Scanner)
-export(ScannerBuilder)
-export(Schema)
-export(StatusCode)
-export(StructArray)
-export(StructScalar)
-export(SubTreeFileSystem)
-export(Table)
-export(TimeUnit)
-export(TimestampParser)
-export(Type)
-export(UnionDataset)
-export(arrow_available)
-export(arrow_info)
-export(arrow_with_dataset)
-export(arrow_with_parquet)
-export(arrow_with_s3)
-export(binary)
-export(bool)
-export(boolean)
-export(buffer)
-export(call_function)
-export(cast_options)
-export(chunked_array)
-export(codec_is_available)
-export(contains)
-export(copy_files)
-export(cpu_count)
-export(dataset_factory)
-export(date32)
-export(date64)
-export(decimal)
-export(default_memory_pool)
-export(dictionary)
-export(ends_with)
-export(everything)
-export(field)
-export(fixed_size_binary)
-export(fixed_size_list_of)
-export(flight_connect)
-export(flight_get)
-export(flight_path_exists)
-export(flight_put)
-export(float)
-export(float16)
-export(float32)
-export(float64)
-export(halffloat)
-export(hive_partition)
-export(install_arrow)
-export(install_pyarrow)
-export(int16)
-export(int32)
-export(int64)
-export(int8)
-export(is_in)
-export(large_binary)
-export(large_list_of)
-export(large_utf8)
-export(last_col)
-export(list_compute_functions)
-export(list_flights)
-export(list_of)
-export(load_flight_server)
-export(map_batches)
-export(match_arrow)
-export(matches)
-export(mmap_create)
-export(mmap_open)
-export(null)
-export(num_range)
-export(one_of)
-export(open_dataset)
-export(read_arrow)
-export(read_csv_arrow)
-export(read_delim_arrow)
-export(read_feather)
-export(read_ipc_stream)
-export(read_json_arrow)
-export(read_message)
-export(read_parquet)
-export(read_schema)
-export(read_tsv_arrow)
-export(record_batch)
-export(s3_bucket)
-export(schema)
-export(set_cpu_count)
-export(starts_with)
-export(string)
-export(struct)
-export(time32)
-export(time64)
-export(timestamp)
-export(type)
-export(uint16)
-export(uint32)
-export(uint64)
-export(uint8)
-export(unify_schemas)
-export(utf8)
-export(value_counts)
-export(write_arrow)
-export(write_dataset)
-export(write_feather)
-export(write_ipc_stream)
-export(write_parquet)
-export(write_to_raw)
-importFrom(R6,R6Class)
-importFrom(assertthat,assert_that)
-importFrom(assertthat,is.string)
-importFrom(bit64,print.integer64)
-importFrom(bit64,str.integer64)
-importFrom(methods,as)
-importFrom(purrr,as_mapper)
-importFrom(purrr,keep)
-importFrom(purrr,map)
-importFrom(purrr,map2)
-importFrom(purrr,map_chr)
-importFrom(purrr,map_dfr)
-importFrom(purrr,map_int)
-importFrom(purrr,map_lgl)
-importFrom(rlang,"%||%")
-importFrom(rlang,.data)
-importFrom(rlang,abort)
-importFrom(rlang,as_label)
-importFrom(rlang,dots_n)
-importFrom(rlang,enexpr)
-importFrom(rlang,enexprs)
-importFrom(rlang,enquo)
-importFrom(rlang,enquos)
-importFrom(rlang,env)
-importFrom(rlang,env_bind)
-importFrom(rlang,eval_tidy)
-importFrom(rlang,exec)
-importFrom(rlang,expr)
-importFrom(rlang,is_bare_character)
-importFrom(rlang,is_false)
-importFrom(rlang,is_integerish)
-importFrom(rlang,is_quosure)
-importFrom(rlang,list2)
-importFrom(rlang,new_data_mask)
-importFrom(rlang,new_environment)
-importFrom(rlang,quo_get_expr)
-importFrom(rlang,quo_is_null)
-importFrom(rlang,quo_set_expr)
-importFrom(rlang,quos)
-importFrom(rlang,seq2)
-importFrom(rlang,set_names)
-importFrom(rlang,syms)
-importFrom(rlang,warn)
-importFrom(stats,median)
-importFrom(stats,quantile)
-importFrom(tidyselect,contains)
-importFrom(tidyselect,ends_with)
-importFrom(tidyselect,eval_select)
-importFrom(tidyselect,everything)
-importFrom(tidyselect,last_col)
-importFrom(tidyselect,matches)
-importFrom(tidyselect,num_range)
-importFrom(tidyselect,one_of)
-importFrom(tidyselect,starts_with)
-importFrom(tidyselect,vars_pull)
-importFrom(tidyselect,vars_rename)
-importFrom(tidyselect,vars_select)
-importFrom(utils,head)
-importFrom(utils,install.packages)
-importFrom(utils,modifyList)
-importFrom(utils,object.size)
-importFrom(utils,packageVersion)
-importFrom(utils,tail)
-importFrom(vctrs,s3_register)
-importFrom(vctrs,vec_cast)
-importFrom(vctrs,vec_ptype_abbr)
-importFrom(vctrs,vec_ptype_full)
-importFrom(vctrs,vec_size)
-importFrom(vctrs,vec_unique)
-useDynLib(arrow, .registration = TRUE)
diff --git a/r/NEWS.md b/r/NEWS.md
deleted file mode 100644
index c064ac7..0000000
--- a/r/NEWS.md
+++ /dev/null
@@ -1,358 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# arrow 3.0.0.9000
-
-## dplyr methods
-
-Many more `dplyr` verbs are supported on Arrow objects:
-
-* `dplyr::mutate()` is now supported in Arrow for many applications. For queries on `Table` and `RecordBatch` that are not yet supported in Arrow, the implementation falls back to pulling data into an in-memory R `data.frame` first, as in the previous release. For queries on `Dataset` (which can be larger than memory), it raises an error if the function is not implemented. The main `mutate()` features that cannot yet be called on Arrow objects are (1) `mutate()` after `group_by()` (which [...]
-* `dplyr::transmute()` (which calls `mutate()`)
-* `dplyr::group_by()` now preserves the `.drop` argument and supports on-the-fly definition of columns
-* `dplyr::relocate()` to reorder columns
-* `dplyr::arrange()` to sort rows
-* `dplyr::compute()` to evaluate the lazy expressions and return an Arrow Table. This is equivalent to `dplyr::collect(as_data_frame = FALSE)`, which was added in 2.0.0.
-
-Over 100 functions can now be called on Arrow objects inside a `dplyr` verb:
-
-* String functions `nchar()`, `tolower()`, and `toupper()`, along with their `stringr` spellings `str_length()`, `str_to_lower()`, and `str_to_upper()`, are supported in Arrow `dplyr` calls. `str_trim()` is also supported.
-* Regular expression functions `sub()`, `gsub()`, and `grepl()`, along with `str_replace()`, `str_replace_all()`, and `str_detect()`, are supported.
-* `cast(x, type)` and `dictionary_encode()` allow changing the type of columns in Arrow objects; `as.numeric()`, `as.character()`, etc. are exposed as similar type-altering conveniences
-* `dplyr::between()`; the Arrow version also allows the `left` and `right` arguments to be columns in the data and not just scalars
-* Additionally, any Arrow C++ compute function can be called inside a `dplyr` verb. This enables you to access Arrow functions that don't have a direct R mapping. See `list_compute_functions()` for all available functions, which are available in `dplyr` prefixed by `arrow_`.
-* Arrow C++ compute functions now do more systematic type promotion when called on data with different types (e.g. int32 and float64). Previously, Scalars in an expressions were always cast to match the type of the corresponding Array, so this new type promotion enables, among other things, operations on two columns (Arrays) in a dataset. As a side effect, some comparisons that worked in prior versions are no longer supported: for example, `dplyr::filter(arrow_dataset, string_column == 3 [...]
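-
-A minimal sketch of a few of the features above (illustrative only; the column
-names are hypothetical, not from the original release notes):
-
-```r
-library(arrow)
-library(dplyr)
-
-tab <- Table$create(x = 1:10, s = letters[1:10])
-tab %>%
-  mutate(double_x = x * 2,            # evaluated by Arrow where supported
-         s_upper = toupper(s)) %>%    # one of the ~100 supported functions
-  relocate(double_x) %>%              # reorder columns
-  arrange(desc(x)) %>%                # sort rows
-  compute()                           # returns an Arrow Table, not a data.frame
-```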
-
-## Datasets
-
-* `open_dataset()` now accepts a vector of file paths (or even a single file path). Among other things, this enables you to open a single very large file and use `write_dataset()` to partition it without having to read the whole file into memory.
-* Datasets can now detect and read a directory of compressed CSVs
-* `write_dataset()` now defaults to `format = "parquet"` and better validates the `format` argument
-* Invalid input for `schema` in `open_dataset()` is now correctly handled
-* Collecting 0 columns from a Dataset no longer returns all of the columns
-* The `Scanner$Scan()` method has been removed; use `Scanner$ScanBatches()`
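-
-For example, repartitioning a single large file without reading it fully into
-memory (a sketch; the paths and the `year` column are hypothetical):
-
-```r
-library(arrow)
-
-ds <- open_dataset("big-file.parquet")    # a single file path now works
-write_dataset(ds, "partitioned-dir",
-              format = "parquet",         # now the default format
-              partitioning = "year")      # assumes a `year` column exists
-```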
-
-## Other improvements
-
-* `value_counts()` to tabulate values in an `Array` or `ChunkedArray`, similar to `base::table()`.
-* `StructArray` objects gain data.frame-like methods, including `names()`, `$`, `[[`, and `dim()`.
-* RecordBatch columns can now be added, replaced, or removed by assigning (`<-`) with either `$` or `[[`
-* Similarly, `Schema` can now be edited by assigning in new types. This enables using the CSV reader to detect the schema of a file, modify the `Schema` object for any columns that you want to read in as a different type, and then use that `Schema` to read the data.
-* Better validation when creating a `Table` with a schema, with columns of different lengths, and with scalar value recycling
-* Reading Parquet files in Japanese or other multi-byte locales on Windows no longer hangs (workaround for a [bug in libstdc++](https://gcc.gnu.org/bugzilla/show_bug.cgi?id=98723); thanks @yutannihilation for the persistence in discovering this!)
-* If you attempt to read string data that has embedded nul (`\0`) characters, the error message now informs you that you can set `options(arrow.skip_nul = TRUE)` to strip them out. It is not recommended to set this option by default since this code path is significantly slower, and most string data does not contain nuls.
-* `read_json_arrow()` now accepts a schema: `read_json_arrow("file.json", schema = schema(col_a = float64(), col_b = string()))`
-
-## Installation and configuration
-
-* The R package can now support working with an Arrow C++ library that has additional features (such as dataset, parquet, string libraries) disabled, and the bundled build script enables setting environment variables to disable them. See `vignette("install", package = "arrow")` for details. This allows a faster, smaller package build in cases where that is useful, and it enables a minimal, functioning R package build on Solaris.
-* On macOS, it is now possible to use the same bundled C++ build that is used by default on Linux, along with all of its customization parameters, by setting the environment variable `FORCE_BUNDLED_BUILD=true`.
-* `arrow` now uses the `mimalloc` memory allocator by default on macOS, if available (as it is in CRAN binaries), instead of `jemalloc`. There are [configuration issues](https://issues.apache.org/jira/browse/ARROW-6994) with `jemalloc` on macOS, and [benchmark analysis](https://ursalabs.org/blog/2021-r-benchmarks-part-1/) shows that this has negative effects on performance, especially on memory-intensive workflows. `jemalloc` remains the default on Linux; `mimalloc` is default on Windows.
-* Setting the `ARROW_DEFAULT_MEMORY_POOL` environment variable to switch memory allocators now works correctly when the Arrow C++ library has been statically linked (as is usually the case when installing from CRAN).
-* The `arrow_info()` function now reports on the additional optional features, as well as the detected SIMD level. If key features or compression libraries are not enabled in the build, `arrow_info()` will refer to the installation vignette for guidance on how to install a more complete build, if desired.
-* If you attempt to read a file that was compressed with a codec that your Arrow build does not contain support for, the error message now will tell you how to reinstall Arrow with that feature enabled.
-* A new vignette about developer environment setup `vignette("developing", package = "arrow")`.
-* When building from source, you can use the environment variable `ARROW_HOME` to point to a specific directory where the Arrow libraries are. This is similar to passing `INCLUDE_DIR` and `LIB_DIR`.
-
-# arrow 3.0.0
-
-## Python and Flight
-
-* Flight methods `flight_get()` and `flight_put()` (renamed from `push_data()` in this release) can handle both Tables and RecordBatches
-* `flight_put()` gains an `overwrite` argument to optionally check for the existence of a resource with the same name
-* `list_flights()` and `flight_path_exists()` enable you to see available resources on a Flight server
-* `Schema` objects now have `r_to_py` and `py_to_r` methods
-* Schema metadata is correctly preserved when converting Tables to/from Python
-
-## Enhancements
-
-* Arithmetic operations (`+`, `*`, etc.) are supported on Arrays and ChunkedArrays and can be used in filter expressions in Arrow `dplyr` pipelines
-* Table columns can now be added, replaced, or removed by assigning (`<-`) with either `$` or `[[`
-* Column names of Tables and RecordBatches can be renamed by assigning `names()`
-* Large string types can now be written to Parquet files
-* The [pronouns `.data` and `.env`](https://rlang.r-lib.org/reference/tidyeval-data.html) are now fully supported in Arrow `dplyr` pipelines.
-* Option `arrow.skip_nul` (default `FALSE`, as in `base::scan()`) allows conversion of Arrow string (`utf8()`) type data containing embedded nul `\0` characters to R. If set to `TRUE`, nuls will be stripped and a warning is emitted if any are found.
-* `arrow_info()` for an overview of various run-time and build-time Arrow configurations, useful for debugging
-* Set environment variable `ARROW_DEFAULT_MEMORY_POOL` before loading the Arrow package to change memory allocators. Windows packages are built with `mimalloc`; most others are built with both `jemalloc` (used by default) and `mimalloc`. These alternative memory allocators are generally much faster than the system memory allocator, so they are used by default when available, but sometimes it is useful to turn them off for debugging purposes. To disable them, set `ARROW_DEFAULT_MEMORY_POO [...]
-* List columns that have attributes on each element are now also included with the metadata that is saved when creating Arrow tables. This allows `sf` tibbles to be faithfully preserved and roundtripped (ARROW-10386).
-* R metadata that exceeds 100Kb is now compressed before being written to a table; see `schema()` for more details.
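-
-A short sketch of the arithmetic and assignment enhancements above
-(illustrative values, not from the original notes):
-
-```r
-library(arrow)
-
-tab <- Table$create(x = 1:3)
-tab$y <- tab$x * 2         # arithmetic on a ChunkedArray; assign a new column
-tab[["x"]] <- NULL         # remove a column
-names(tab) <- "doubled"    # rename by assigning names()
-```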
-
-## Bug fixes
-
-* Fixed a performance regression in converting Arrow string types to R that was present in the 2.0.0 release
-* C++ functions now trigger garbage collection when needed
-* `write_parquet()` can now write RecordBatches
-* Reading a Table from a RecordBatchStreamReader containing 0 batches no longer crashes
-* `readr`'s `problems` attribute is removed when converting to Arrow RecordBatch and table to prevent large amounts of metadata from accumulating inadvertently (ARROW-10624)
-* Fixed reading of compressed Feather files written with Arrow 0.17 (ARROW-10850)
-* `SubTreeFileSystem` gains a useful print method and no longer errors when printing
-
-## Packaging and installation
-
-* Nightly development versions of the conda `r-arrow` package are available with `conda install -c arrow-nightlies -c conda-forge --strict-channel-priority r-arrow`
-* Linux installation now safely supports older `cmake` versions
-* Compiler version checking for enabling S3 support correctly identifies the active compiler
-* Updated guidance and troubleshooting in `vignette("install", package = "arrow")`, especially for known CentOS issues
-* Operating system detection on Linux uses the [`distro`](https://enpiar.com/distro/) package. If your OS isn't correctly identified, please report an issue there.
-
-# arrow 2.0.0
-
-## Datasets
-
-* `write_dataset()` to Feather or Parquet files with partitioning. See the end of `vignette("dataset", package = "arrow")` for discussion and examples.
-* Datasets now have `head()`, `tail()`, and take (`[`) methods. `head()` is optimized but the others may not be performant.
-* `collect()` gains an `as_data_frame` argument, default `TRUE`; when `FALSE`, it evaluates the accumulated `select` and `filter` query but keeps the result in Arrow, not an R `data.frame`
-* `read_csv_arrow()` supports specifying column types, both with a `Schema` and with the compact string representation for types used in the `readr` package. It also has gained a `timestamp_parsers` argument that lets you express a set of `strptime` parse strings that will be tried to convert columns designated as `Timestamp` type.
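-
-A sketch of these Dataset features (the directory and column names are
-hypothetical):
-
-```r
-library(arrow)
-library(dplyr)
-
-ds <- open_dataset("taxi-data-dir")
-ds %>%
-  filter(total_amount > 0) %>%
-  select(total_amount, payment_type) %>%
-  collect(as_data_frame = FALSE)      # keep the result as an Arrow Table
-
-write_dataset(ds, "out-dir", partitioning = "payment_type")
-```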
-
-## AWS S3 support
-
-* S3 support is now enabled in binary macOS and Windows (Rtools40 only, i.e. R >= 4.0) packages. To enable it on Linux, you need the additional system dependencies `libcurl` and `openssl`, as well as a sufficiently modern compiler. See `vignette("install", package = "arrow")` for details.
-* File readers and writers (`read_parquet()`, `write_feather()`, et al.), as well as `open_dataset()` and `write_dataset()`, allow you to access resources on S3 (or on file systems that emulate S3) either by providing an `s3://` URI or by providing a `FileSystem$path()`. See `vignette("fs", package = "arrow")` for examples.
-* `copy_files()` allows you to recursively copy directories of files from one file system to another, such as from S3 to your local machine.
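-
-For example (a sketch; the bucket and paths are hypothetical, and S3 support
-must be enabled in the build):
-
-```r
-library(arrow)
-
-df <- read_parquet("s3://my-bucket/data/file.parquet")
-bucket <- s3_bucket("my-bucket")
-copy_files(bucket$path("data"), "local-dir")   # recursive copy to local disk
-```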
-
-## Flight RPC
-
-[Flight](https://arrow.apache.org/blog/2019/10/13/introducing-arrow-flight/)
-is a general-purpose client-server framework for high performance
-transport of large datasets over network interfaces.
-The `arrow` R package now provides methods for connecting to Flight RPC servers
-to send and receive data. See `vignette("flight", package = "arrow")` for an overview.
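-
-A connection sketch (assuming a Flight server is already listening locally;
-the port and resource path here are hypothetical):
-
-```r
-library(arrow)
-
-client <- flight_connect(port = 8089)
-flight_put(client, Table$create(iris), path = "test-data/iris")
-list_flights(client)                      # resources available on the server
-df <- flight_get(client, "test-data/iris")
-```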
-
-## Computation
-
-* Comparison (`==`, `>`, etc.) and boolean (`&`, `|`, `!`) operations, along with `is.na`, `%in%` and `match` (called `match_arrow()`), on Arrow Arrays and ChunkedArrays are now implemented in the C++ library.
-* Aggregation methods `min()`, `max()`, and `unique()` are implemented for Arrays and ChunkedArrays.
-* `dplyr` filter expressions on Arrow Tables and RecordBatches are now evaluated in the C++ library, rather than by pulling data into R and evaluating. This yields significant performance improvements.
-* `dim()` (`nrow`) for dplyr queries on Table/RecordBatch is now supported
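-
-For instance (a small sketch with made-up values):
-
-```r
-library(arrow)
-
-a <- Array$create(c(1L, 5L, NA, 10L))
-a > 4                        # comparison runs in the C++ library
-is.na(a)                     # null detection, also in C++
-match_arrow(a, c(5L, 10L))   # Arrow analogue of match()
-unique(a)                    # aggregation method on Arrays
-```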
-
-## Packaging and installation
-
-* `arrow` now depends on [`cpp11`](https://cpp11.r-lib.org/), which brings more robust UTF-8 handling and faster compilation
-* The Linux build script now succeeds on older versions of R
-* macOS binary packages now ship with zstandard compression enabled
-
-## Bug fixes and other enhancements
-
-* Automatic conversion of Arrow `Int64` type when all values fit with an R 32-bit integer now correctly inspects all chunks in a ChunkedArray, and this conversion can be disabled (so that `Int64` always yields a `bit64::integer64` vector) by setting `options(arrow.int64_downcast = FALSE)`.
-* In addition to the data.frame column metadata preserved in round trip, added in 1.0.0, now attributes of the data.frame itself are also preserved in Arrow schema metadata.
-* File writers now respect the system umask setting
-* `ParquetFileReader` has additional methods for accessing individual columns or row groups from the file
-* Various segfaults fixed: invalid input in `ParquetFileWriter`; invalid `ArrowObject` pointer from a saved R object; converting deeply nested structs from Arrow to R
-* The `properties` and `arrow_properties` arguments to `write_parquet()` are deprecated
-
-# arrow 1.0.1
-
-## Bug fixes
-
-* Filtering a Dataset that has multiple partition keys using an `%in%` expression now faithfully returns all relevant rows
-* Datasets can now have path segments in the root directory that start with `.` or `_`; files and subdirectories starting with those prefixes are still ignored
-* `open_dataset("~/path")` now correctly expands the path
-* The `version` option to `write_parquet()` is now correctly implemented
-* An UBSAN failure in the `parquet-cpp` library has been fixed
-* For bundled Linux builds, the logic for finding `cmake` is more robust, and you can now specify a `/path/to/cmake` by setting the `CMAKE` environment variable
-
-# arrow 1.0.0
-
-## Arrow format conversion
-
-* `vignette("arrow", package = "arrow")` includes tables that explain how R types are converted to Arrow types and vice versa.
-* Support added for converting to/from more Arrow types: `uint64`, `binary`, `fixed_size_binary`, `large_binary`, `large_utf8`, `large_list`, `list` of `structs`.
-* `character` vectors that exceed 2GB are converted to Arrow `large_utf8` type
-* `POSIXlt` objects can now be converted to Arrow (`struct`)
-* R `attributes()` are preserved in Arrow metadata when converting to Arrow RecordBatch and table and are restored when converting from Arrow. This means that custom subclasses, such as `haven::labelled`, are preserved in round trip through Arrow.
-* Schema metadata is now exposed as a named list, and it can be modified by assignment like `batch$metadata$new_key <- "new value"`
-* Arrow types `int64`, `uint32`, and `uint64` now are converted to R `integer` if all values fit in bounds
-* Arrow `date32` is now converted to R `Date` with `double` underlying storage. Even though the data values themselves are integers, this provides more strict round-trip fidelity
-* When converting to R `factor`, `dictionary` ChunkedArrays that do not have identical dictionaries are properly unified
-* In the 1.0 release, the Arrow IPC metadata version is increased from V4 to V5. By default, `RecordBatch{File,Stream}Writer` will write V5, but you can specify an alternate `metadata_version`. For convenience, if you know the consumer you're writing to cannot read V5, you can set the environment variable `ARROW_PRE_1_0_METADATA_VERSION=1` to write V4 without changing any other code.
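-
-For example, editing schema metadata by assignment (from the bullet above;
-the key name is made up):
-
-```r
-library(arrow)
-
-batch <- record_batch(x = 1:3)
-batch$metadata$new_key <- "new value"
-batch$metadata
-```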
-
-## Datasets
-
-* CSV and other text-delimited datasets are now supported
-* With a custom C++ build, it is possible to read datasets directly on S3 by passing a URL like `ds <- open_dataset("s3://...")`. Note that this currently requires a special C++ library build with additional dependencies--this is not yet available in CRAN releases or in nightly packages.
-* When reading individual CSV and JSON files, compression is automatically detected from the file extension
-
-## Other enhancements
-
-* Initial support for C++ aggregation methods: `sum()` and `mean()` are implemented for `Array` and `ChunkedArray`
-* Tables and RecordBatches have additional data.frame-like methods, including `dimnames()` and `as.list()`
-* Tables and ChunkedArrays can now be moved to/from Python via `reticulate`
-
-## Bug fixes and deprecations
-
-* Non-UTF-8 strings (common on Windows) are correctly coerced to UTF-8 when passing to Arrow memory and appropriately re-localized when converting to R
-* The `coerce_timestamps` option to `write_parquet()` is now correctly implemented.
-* Creating a Dictionary array respects the `type` definition if provided by the user
-* `read_arrow` and `write_arrow` are now deprecated; use the `read/write_feather()` and `read/write_ipc_stream()` functions depending on whether you're working with the Arrow IPC file or stream format, respectively.
-* Previously deprecated `FileStats`, `read_record_batch`, and `read_table` have been removed.
-
-## Installation and packaging
-
-* For improved performance in memory allocation, macOS and Linux binaries now have `jemalloc` included, and Windows packages use `mimalloc`
-* Linux installation: some tweaks to OS detection for binaries, some updates to known installation issues in the vignette
-* The bundled libarrow is built with the same `CC` and `CXX` values that R uses
-* Failure to build the bundled libarrow yields a clear message
-* Various streamlining efforts to reduce library size and compile time
-
-# arrow 0.17.1
-
-* Updates for compatibility with `dplyr` 1.0
-* `reticulate::r_to_py()` conversion now correctly works automatically, without having to call the method yourself
-* Assorted bug fixes in the C++ library around Parquet reading
-
-# arrow 0.17.0
-
-## Feather v2
-
-This release includes support for version 2 of the Feather file format.
-Feather v2 features full support for all Arrow data types,
-fixes the 2GB per-column limitation for large amounts of string data,
-and it allows files to be compressed using either `lz4` or `zstd`.
-`write_feather()` can write either version 2 or
-[version 1](https://github.com/wesm/feather) Feather files, and `read_feather()`
-automatically detects which file version it is reading.
-
-Related to this change, several functions around reading and writing data
-have been reworked. `read_ipc_stream()` and `write_ipc_stream()` have been
-added to facilitate writing data to the Arrow IPC stream format, which is
-slightly different from the IPC file format (Feather v2 *is* the IPC file format).
-
-Behavior has been standardized: all `read_<format>()` functions return an R `data.frame`
-(default) or a `Table` if the argument `as_data_frame = FALSE`;
-all `write_<format>()` functions return the data object, invisibly.
-To facilitate some workflows, a special `write_to_raw()` function is added
-to wrap `write_ipc_stream()` and return the `raw` vector containing the buffer
-that was written.
-
-To achieve this standardization, `read_table()`, `read_record_batch()`,
-`read_arrow()`, and `write_arrow()` have been deprecated.
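-
-A usage sketch of the reworked readers and writers (the file names here are
-hypothetical):
-
-```r
-library(arrow)
-
-df <- data.frame(x = 1:3, y = c("a", "b", "c"))
-write_feather(df, "data.feather")       # writes Feather v2 by default
-df2 <- read_feather("data.feather")     # file version detected automatically
-raw_bytes <- write_to_raw(df)           # IPC stream buffer as a raw vector
-```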
-
-## Python interoperability
-
-The 0.17 Apache Arrow release includes a C data interface that allows
-exchanging Arrow data in-process at the C level without copying
-and without libraries having a build or runtime dependency on each other. This enables
-us to use `reticulate` to share data between R and Python (`pyarrow`) efficiently.
-
-See `vignette("python", package = "arrow")` for details.
-
-## Datasets
-
-* Dataset reading benefits from many speedups and fixes in the C++ library
-* Datasets have a `dim()` method, which sums rows across all files (#6635, @boshek)
-* Combine multiple datasets into a single queryable `UnionDataset` with the `c()` method
-* Dataset filtering now treats `NA` as `FALSE`, consistent with `dplyr::filter()`
-* Dataset filtering is now correctly supported for all Arrow date/time/timestamp column types
-* `vignette("dataset", package = "arrow")` now has correct, executable code
-
-## Installation
-
-* Installation on Linux now builds the C++ library from source by default, with some compression libraries disabled. For a faster, richer build, set the environment variable `NOT_CRAN=true`. See `vignette("install", package = "arrow")` for details and more options.
-* Source installation is faster and more reliable on more Linux distributions.
-
-## Other bug fixes and enhancements
-
-* `unify_schemas()` to create a `Schema` containing the union of fields in multiple schemas
-* Timezones are faithfully preserved in roundtrip between R and Arrow
-* `read_feather()` and other reader functions close any file connections they open
-* Arrow R6 objects no longer have namespace collisions when the `R.oo` package is also loaded
-* `FileStats` is renamed to `FileInfo`, and the original spelling has been deprecated
-
-# arrow 0.16.0.2
-
-* `install_arrow()` now installs the latest release of `arrow`, including Linux dependencies, either for CRAN releases or for development builds (if `nightly = TRUE`)
-* Package installation on Linux no longer downloads C++ dependencies unless the `LIBARROW_DOWNLOAD` or `NOT_CRAN` environment variable is set
-* `write_feather()`, `write_arrow()` and `write_parquet()` now return their input,
-similar to the `write_*` functions in the `readr` package (#6387, @boshek)
-* Can now infer the type of an R `list` and create a ListArray when all list elements are the same type (#6275, @michaelchirico)
-
-# arrow 0.16.0
-
-## Multi-file datasets
-
-This release includes a `dplyr` interface to Arrow Datasets,
-which let you work efficiently with large, multi-file datasets as a single entity.
-Explore a directory of data files with `open_dataset()` and then use `dplyr` methods to `select()`, `filter()`, etc. Work will be done where possible in Arrow memory. When necessary, data is pulled into R for further computation. `dplyr` methods are conditionally loaded if you have `dplyr` available; it is not a hard dependency.
-
-See `vignette("dataset", package = "arrow")` for details.
-
-## Linux installation
-
-A source package installation (as from CRAN) will now handle its C++ dependencies automatically.
-For common Linux distributions and versions, installation will retrieve a prebuilt static
-C++ library for inclusion in the package; where this binary is not available,
-the package executes a bundled script that should build the Arrow C++ library with
-no system dependencies beyond what R requires.
-
-See `vignette("install", package = "arrow")` for details.
-
-## Data exploration
-
-* `Table`s and `RecordBatch`es also have `dplyr` methods.
-* For exploration without `dplyr`, `[` methods for Tables, RecordBatches, Arrays, and ChunkedArrays now support natural row extraction operations. These use the C++ `Filter`, `Slice`, and `Take` methods for efficient access, depending on the type of selection vector.
-* An experimental, lazily evaluated `array_expression` class has also been added, enabling among other things the ability to filter a Table with some function of Arrays, such as `arrow_table[arrow_table$var1 > 5, ]` without having to pull everything into R first.
-
-## Compression
-
-* `write_parquet()` now supports compression
-* `codec_is_available()` returns `TRUE` or `FALSE` depending on whether the Arrow C++ library was built with support for a given compression library (e.g. gzip, lz4, snappy)
-* Windows builds now include support for zstd and lz4 compression (#5814, @gnguy)
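-
-For example (a sketch; the file name is arbitrary):
-
-```r
-library(arrow)
-
-codec_is_available("zstd")   # TRUE if the C++ build supports zstd
-write_parquet(data.frame(x = 1:3), "x.parquet", compression = "snappy")
-```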
-
-## Other fixes and improvements
-
-* Arrow null type is now supported
-* Factor types are now preserved in round trip through Parquet format (#6135, @yutannihilation)
-* Reading an Arrow dictionary type coerces dictionary values to `character` (as R `factor` levels are required to be) instead of raising an error
-* Many improvements to Parquet function documentation (@karldw, @khughitt)
-
-# arrow 0.15.1
-
-* This patch release includes bugfixes in the C++ library around dictionary types and Parquet reading.
-
-# arrow 0.15.0
-
-## Breaking changes
-
-* The R6 classes that wrap the C++ classes are now documented and exported and have been renamed to be more R-friendly. Users of the high-level R interface in this package are not affected. Those who want to interact with the Arrow C++ API more directly should work with these objects and methods. As part of this change, many functions that instantiated these R6 objects have been removed in favor of `Class$create()` methods. Notably, `arrow::array()` and `arrow::table()` have been removed [...]
-* Due to a subtle change in the Arrow message format, data written by the 0.15 version libraries may not be readable by older versions. If you need to send data to a process that uses an older version of Arrow (for example, an Apache Spark server that hasn't yet updated to Arrow 0.15), you can set the environment variable `ARROW_PRE_0_15_IPC_FORMAT=1`.
-* The `as_tibble` argument in the `read_*()` functions has been renamed to `as_data_frame` (ARROW-6337, @jameslamb)
-* The `arrow::Column` class has been removed, as it was removed from the C++ library
-
-## New features
-
-* `Table` and `RecordBatch` objects have S3 methods that enable you to work with them more like `data.frame`s. Extract columns, subset, and so on. See `?Table` and `?RecordBatch` for examples.
-* Initial implementation of bindings for the C++ File System API. (ARROW-6348)
-* Compressed streams are now supported on Windows (ARROW-6360), and you can also specify a compression level (ARROW-6533)
-
-## Other upgrades
-
-* Parquet file reading is much, much faster, thanks to improvements in the Arrow C++ library.
-* `read_csv_arrow()` supports more parsing options, including `col_names`, `na`, `quoted_na`, and `skip`
-* `read_parquet()` and `read_feather()` can ingest data from a `raw` vector (ARROW-6278)
-* File readers now properly handle paths that need expanding, such as `~/file.parquet` (ARROW-6323)
-* Improved support for creating types in a schema: the types' printed names (e.g. "double") are guaranteed to be valid to use in instantiating a schema (e.g. `double()`), and time types can be created with human-friendly resolution strings ("ms", "s", etc.). (ARROW-6338, ARROW-6364)
-
-
-# arrow 0.14.1
-
-Initial CRAN release of the `arrow` package. Key features include:
-
-* Read and write support for various file formats, including Parquet, Feather/Arrow, CSV, and JSON.
-* API bindings to the C++ library for Arrow data types and objects, as well as mapping between Arrow types and R data types.
-* Tools for helping with C++ library configuration and installation.
diff --git a/r/R/array-data.R b/r/R/array-data.R
deleted file mode 100644
index 08b0913..0000000
--- a/r/R/array-data.R
+++ /dev/null
@@ -1,53 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @title ArrayData class
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @description The `ArrayData` class allows you to get and inspect the data
-#' inside an `arrow::Array`.
-#'
-#' @section Usage:
-#'
-#' ```
-#' data <- Array$create(x)$data()
-#'
-#' data$type()
-#' data$length()
-#' data$null_count()
-#' data$offset()
-#' data$buffers()
-#' ```
-#'
-#' @section Methods:
-#'
-#' ...
-#'
-#' @rdname ArrayData
-#' @name ArrayData
-#' @include type.R
-ArrayData <- R6Class("ArrayData",
-  inherit = ArrowObject,
-  active = list(
-    type = function() ArrayData__get_type(self),
-    length = function() ArrayData__get_length(self),
-    null_count = function() ArrayData__get_null_count(self),
-    offset = function() ArrayData__get_offset(self),
-    buffers = function() ArrayData__buffers(self)
-  )
-)
diff --git a/r/R/array.R b/r/R/array.R
deleted file mode 100644
index 1d63c57..0000000
--- a/r/R/array.R
+++ /dev/null
@@ -1,301 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-datum.R
-
-#' @title Arrow Arrays
-#' @description An `Array` is an immutable data array with some logical type
-#' and some length. Most logical types are contained in the base
-#' `Array` class; there are also subclasses for `DictionaryArray`, `ListArray`,
-#' and `StructArray`.
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @section Factory:
-#' The `Array$create()` factory method instantiates an `Array` and
-#' takes the following arguments:
-#' * `x`: an R vector, list, or `data.frame`
-#' * `type`: an optional [data type][data-type] for `x`. If omitted, the type
-#'    will be inferred from the data.
-#'
-#' `Array$create()` will return the appropriate subclass of `Array`, such as
-#' `DictionaryArray` when given an R factor.
-#'
-#' To compose a `DictionaryArray` directly, call `DictionaryArray$create()`,
-#' which takes two arguments:
-#' * `x`: an R vector or `Array` of integers for the dictionary indices
-#' * `dict`: an R vector or `Array` of dictionary values (like R factor levels
-#'   but not limited to strings only)
-#' @section Usage:
-#'
-#' ```
-#' a <- Array$create(x)
-#' length(a)
-#'
-#' print(a)
-#' a == a
-#' ```
-#'
-#' @section Methods:
-#'
-#' - `$IsNull(i)`: Return true if value at index is null. Does not boundscheck
-#' - `$IsValid(i)`: Return true if value at index is valid. Does not boundscheck
-#' - `$length()`: Size in the number of elements this array contains
-#' - `$offset()`: A relative position into another array's data, to enable zero-copy slicing
-#' - `$null_count()`: The number of null entries in the array
-#' - `$type()`: logical type of data
-#' - `$type_id()`: type id
-#' - `$Equals(other)` : is this array equal to `other`
-#' - `$ApproxEquals(other)` :
-#' - `$Diff(other)` : return a string expressing the difference between two arrays
-#' - `$data()`: return the underlying [ArrayData][ArrayData]
-#' - `$as_vector()`: convert to an R vector
-#' - `$ToString()`: string representation of the array
-#' - `$Slice(offset, length = NULL)`: Construct a zero-copy slice of the array
-#'    with the indicated offset and length. If length is `NULL`, the slice goes
-#'    until the end of the array.
-#' - `$Take(i)`: return an `Array` with values at positions given by integers
-#'    (R vector or Arrow Array) `i`.
-#' - `$Filter(i, keep_na = TRUE)`: return an `Array` with values at positions where logical
-#'    vector (or Arrow boolean Array) `i` is `TRUE`.
-#' - `$SortIndices(descending = FALSE)`: return an `Array` of integer positions that can be
-#'    used to rearrange the `Array` in ascending or descending order
-#' - `$RangeEquals(other, start_idx, end_idx, other_start_idx)` :
-#' - `$cast(target_type, safe = TRUE, options = cast_options(safe))`: Alter the
-#'    data in the array to change its type.
-#' - `$View(type)`: Construct a zero-copy view of this array with the given type.
-#' - `$Validate()` : Perform any validation checks to determine obvious inconsistencies
-#'    within the array's internal data. This can be an expensive check, potentially `O(length)`
-#'
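-#' @section Example:
-#'
-#' An illustrative sketch (not from the original file; values are made up):
-#'
-#' ```
-#' a <- Array$create(c(1L, 2L, NA, 4L))
-#' a$IsNull(2)          # TRUE: the third value is null (indices are zero-based)
-#' a$Slice(1, 2)        # zero-copy slice holding the 2nd and 3rd values
-#' a$Take(c(3L, 0L))    # values at zero-based positions 3 and 0
-#' a$cast(float64())    # same values with a new logical type
-#' ```
-#'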
-#' @rdname array
-#' @name array
-#' @export
-Array <- R6Class("Array",
-  inherit = ArrowDatum,
-  public = list(
-    IsNull = function(i) Array__IsNull(self, i),
-    IsValid = function(i) Array__IsValid(self, i),
-    length = function() Array__length(self),
-    type_id = function() Array__type_id(self),
-    Equals = function(other, ...) {
-      inherits(other, "Array") && Array__Equals(self, other)
-    },
-    ApproxEquals = function(other) {
-      inherits(other, "Array") && Array__ApproxEquals(self, other)
-    },
-    Diff = function(other) {
-      if (!inherits(other, "Array")) {
-        other <- Array$create(other)
-      }
-      Array__Diff(self, other)
-    },
-    data = function() Array__data(self),
-    as_vector = function() Array__as_vector(self),
-    ToString = function() {
-      typ <- paste0("<", self$type$ToString(), ">")
-      paste(typ, Array__ToString(self), sep = "\n")
-    },
-    Slice = function(offset, length = NULL) {
-      if (is.null(length)) {
-        Array__Slice1(self, offset)
-      } else {
-        Array__Slice2(self, offset, length)
-      }
-    },
-    Take = function(i) {
-      if (is.numeric(i)) {
-        i <- as.integer(i)
-      }
-      if (is.integer(i)) {
-        i <- Array$create(i)
-      }
-      call_function("take", self, i)
-    },
-    Filter = function(i, keep_na = TRUE) {
-      if (is.logical(i)) {
-        i <- Array$create(i)
-      }
-      assert_is(i, "Array")
-      call_function("filter", self, i, options = list(keep_na = keep_na))
-    },
-    SortIndices = function(descending = FALSE) {
-      assert_that(is.logical(descending))
-      assert_that(length(descending) == 1L)
-      assert_that(!is.na(descending))
-      call_function("array_sort_indices", self, options = list(order = descending))
-    },
-    RangeEquals = function(other, start_idx, end_idx, other_start_idx = 0L) {
-      assert_is(other, "Array")
-      Array__RangeEquals(self, other, start_idx, end_idx, other_start_idx)
-    },
-    View = function(type) {
-      Array$create(Array__View(self, as_type(type)))
-    },
-    Validate = function() Array__Validate(self)
-  ),
-  active = list(
-    null_count = function() Array__null_count(self),
-    offset = function() Array__offset(self),
-    type = function() Array__type(self)
-  )
-)
-Array$create <- function(x, type = NULL) {
-  if (!is.null(type)) {
-    type <- as_type(type)
-  }
-  if (inherits(x, "Scalar")) {
-    out <- x$as_array()
-    if (!is.null(type)) {
-      out <- out$cast(type)
-    }
-    return(out)
-  }
-  vec_to_arrow(x, type)
-}
-
-#' @rdname array
-#' @usage NULL
-#' @format NULL
-#' @export
-DictionaryArray <- R6Class("DictionaryArray", inherit = Array,
-  public = list(
-    indices = function() DictionaryArray__indices(self),
-    dictionary = function() DictionaryArray__dictionary(self)
-  ),
-  active = list(
-    ordered = function() self$type$ordered
-  )
-)
-DictionaryArray$create <- function(x, dict = NULL) {
-  if (is.factor(x)) {
-    # The simple case: converting a factor.
-    # Ignoring `dict`; should probably error if dict is not NULL
-    return(Array$create(x))
-  }
-
-  assert_that(!is.null(dict))
-  if (!is.Array(x)) {
-    x <- Array$create(x)
-  }
-  if (!is.Array(dict)) {
-    dict <- Array$create(dict)
-  }
-  type <- DictionaryType$create(x$type, dict$type)
-  DictionaryArray__FromArrays(type, x, dict)
-}
-
-#' @rdname array
-#' @usage NULL
-#' @format NULL
-#' @export
-StructArray <- R6Class("StructArray", inherit = Array,
-  public = list(
-    field = function(i) StructArray__field(self, i),
-    GetFieldByName = function(name) StructArray__GetFieldByName(self, name),
-    Flatten = function() StructArray__Flatten(self)
-  )
-)
-
-
-#' @export
-`[[.StructArray` <- function(x, i, ...) {
-  if (is.character(i)) {
-    x$GetFieldByName(i)
-  } else if (is.numeric(i)) {
-    x$field(i - 1)
-  } else {
-    stop("'i' must be character or numeric, not ", class(i), call. = FALSE)
-  }
-}
-
-#' @export
-`$.StructArray` <- function(x, name, ...) {
-  assert_that(is.string(name))
-  if (name %in% ls(x)) {
-    get(name, x)
-  } else {
-    x$GetFieldByName(name)
-  }
-}
-
-#' @export
-names.StructArray <- function(x, ...) StructType__field_names(x$type)
-
-#' @export
-dim.StructArray <- function(x, ...) c(length(x), x$type$num_fields)
-
-#' @export
-as.data.frame.StructArray <- function(x, row.names = NULL, optional = FALSE, ...) {
-  as.vector(x)
-}
-
-#' @rdname array
-#' @usage NULL
-#' @format NULL
-#' @export
-ListArray <- R6Class("ListArray", inherit = Array,
-  public = list(
-    values = function() ListArray__values(self),
-    value_length = function(i) ListArray__value_length(self, i),
-    value_offset = function(i) ListArray__value_offset(self, i),
-    raw_value_offsets = function() ListArray__raw_value_offsets(self)
-  ),
-  active = list(
-    value_type = function() ListArray__value_type(self)
-  )
-)
-
-#' @rdname array
-#' @usage NULL
-#' @format NULL
-#' @export
-LargeListArray <- R6Class("LargeListArray", inherit = Array,
-  public = list(
-    values = function() LargeListArray__values(self),
-    value_length = function(i) LargeListArray__value_length(self, i),
-    value_offset = function(i) LargeListArray__value_offset(self, i),
-    raw_value_offsets = function() LargeListArray__raw_value_offsets(self)
-  ),
-  active = list(
-    value_type = function() LargeListArray__value_type(self)
-  )
-)
-
-#' @rdname array
-#' @usage NULL
-#' @format NULL
-#' @export
-FixedSizeListArray <- R6Class("FixedSizeListArray", inherit = Array,
-  public = list(
-    values = function() FixedSizeListArray__values(self),
-    value_length = function(i) FixedSizeListArray__value_length(self, i),
-    value_offset = function(i) FixedSizeListArray__value_offset(self, i)
-  ),
-  active = list(
-    value_type = function() FixedSizeListArray__value_type(self),
-    list_size = function() self$type$list_size
-  )
-)
-
-is.Array <- function(x, type = NULL) {
-  is_it <- inherits(x, c("Array", "ChunkedArray"))
-  if (is_it && !is.null(type)) {
-    is_it <- x$type$ToString() %in% type
-  }
-  is_it
-}
diff --git a/r/R/arrow-datum.R b/r/R/arrow-datum.R
deleted file mode 100644
index dd43307..0000000
--- a/r/R/arrow-datum.R
+++ /dev/null
@@ -1,165 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-package.R
-
-# Base class for Array, ChunkedArray, and Scalar, for S3 method dispatch only.
-# Does not exist in C++ class hierarchy
-ArrowDatum <- R6Class("ArrowDatum", inherit = ArrowObject,
-  public = list(
-    cast = function(target_type, safe = TRUE, ...) {
-      opts <- cast_options(safe, ...)
-      opts$to_type <- as_type(target_type)
-      call_function("cast", self, options = opts)
-    }
-  )
-)
-
-#' @export
-length.ArrowDatum <- function(x) x$length()
-
-#' @export
-is.na.ArrowDatum <- function(x) call_function("is_null", x)
-
-#' @export
-is.nan.ArrowDatum <- function(x) call_function("is_nan", x)
-
-#' @export
-as.vector.ArrowDatum <- function(x, mode) {
-  tryCatch(
-    x$as_vector(),
-    error = handle_embedded_nul_error
-  )
-}
-
-filter_rows <- function(x, i, keep_na = TRUE, ...) {
-  # General purpose function for [ row subsetting with R semantics
-  # Based on the input for `i`, calls x$Filter, x$Slice, or x$Take
-  nrows <- x$num_rows %||% x$length() # Depends on whether Array or Table-like
-  if (inherits(i, "array_expression")) {
-    # Evaluate it
-    i <- eval_array_expression(i)
-  }
-  if (is.logical(i)) {
-    if (isTRUE(i)) {
-      # Shortcut without doing any work
-      x
-    } else {
-      i <- rep_len(i, nrows) # For R recycling behavior; consider vctrs::vec_recycle()
-      x$Filter(i, keep_na)
-    }
-  } else if (is.numeric(i)) {
-    if (all(i < 0)) {
-      # in R, negative i means "everything but i"
-      i <- setdiff(seq_len(nrows), -1 * i)
-    }
-    if (is.sliceable(i)) {
-      x$Slice(i[1] - 1, length(i))
-    } else if (all(i > 0)) {
-      x$Take(i - 1)
-    } else {
-      stop("Cannot mix positive and negative indices", call. = FALSE)
-    }
-  } else if (is.Array(i, INTEGER_TYPES)) {
-    # NOTE: this doesn't do the - 1 offset
-    x$Take(i)
-  } else if (is.Array(i, "bool")) {
-    x$Filter(i, keep_na)
-  } else {
-    # Unsupported cases
-    if (is.Array(i)) {
-      stop("Cannot extract rows with an Array of type ", i$type$ToString(), call. = FALSE)
-    }
-    stop("Cannot extract rows with an object of class ", class(i), call.=FALSE)
-  }
-}
-
-#' @export
-`[.ArrowDatum` <- filter_rows
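-
-# Illustrative sketch of the semantics above (hypothetical values):
-#   a <- Array$create(c(10, 20, 30, 40))
-#   a[2:3]        # contiguous positive indices -> zero-copy $Slice()
-#   a[c(4, 1)]    # non-contiguous indices      -> $Take()
-#   a[a > 15]     # Arrow boolean Array         -> $Filter()
-#   a[-1]         # negative indices mean "all but", as in base R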
-
-#' @importFrom utils head
-#' @export
-head.ArrowDatum <- function(x, n = 6L, ...) {
-  assert_is(n, c("numeric", "integer"))
-  assert_that(length(n) == 1)
-  len <- NROW(x)
-  if (n < 0) {
-    # head(x, negative) means all but the last n rows
-    n <- max(len + n, 0)
-  } else {
-    n <- min(len, n)
-  }
-  if (n == len) {
-    return(x)
-  }
-  x$Slice(0, n)
-}
-
-#' @importFrom utils tail
-#' @export
-tail.ArrowDatum <- function(x, n = 6L, ...) {
-  assert_is(n, c("numeric", "integer"))
-  assert_that(length(n) == 1)
-  len <- NROW(x)
-  if (n < 0) {
-    # tail(x, negative) means all but the first n rows
-    n <- min(-n, len)
-  } else {
-    n <- max(len - n, 0)
-  }
-  if (n == 0) {
-    return(x)
-  }
-  x$Slice(n)
-}
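-
-# Illustrative sketch (not part of the original file): negative `n` follows
-# base R semantics, shown here for an Array of length 5.
-#   a <- Array$create(1:5)
-#   head(a, 2)   # first two elements: 1, 2
-#   head(a, -2)  # all but the last two: 1, 2, 3
-#   tail(a, -2)  # all but the first two: 3, 4, 5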
-
-is.sliceable <- function(i) {
-  # Determine whether `i` can be expressed as a $Slice() command
-  is.numeric(i) &&
-    length(i) > 0 &&
-    all(i > 0) &&
-    identical(as.integer(i), i[1]:i[length(i)])
-}
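-
-# Illustrative sketch (not part of the original file):
-#   is.sliceable(2:5)        # TRUE: a contiguous ascending run -> one $Slice()
-#   is.sliceable(c(1, 3, 4)) # FALSE: there is a gap, so $Take() is needed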
-
-#' @export
-as.double.ArrowDatum <- function(x, ...) as.double(as.vector(x), ...)
-
-#' @export
-as.integer.ArrowDatum <- function(x, ...) as.integer(as.vector(x), ...)
-
-#' @export
-as.character.ArrowDatum <- function(x, ...) as.character(as.vector(x), ...)
-
-#' @export
-sort.ArrowDatum <- function(x, decreasing = FALSE, na.last = NA, ...) {
-  # Arrow always sorts nulls at the end of the array. This corresponds to
-  # sort(na.last = TRUE). For the other two cases (na.last = NA and
-  # na.last = FALSE) we need to use workarounds.
-  # TODO: Implement this more cleanly after ARROW-12063
-  if (is.na(na.last)) {
-    # Filter out NAs before sorting
-    x <- x$Filter(!is.na(x))
-    x$Take(x$SortIndices(descending = decreasing))
-  } else if (na.last) {
-    x$Take(x$SortIndices(descending = decreasing))
-  } else {
-    # Create a new array that encodes missing values as 1 and non-missing values
-    # as 0. Sort descending by that array first to get the NAs at the beginning
-    tbl <- Table$create(x = x, `is_na` = as.integer(is.na(x)))
-    tbl$x$Take(tbl$SortIndices(names = c("is_na", "x"), descending = c(TRUE, decreasing)))
-  }
-}
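-
-# Illustrative sketch (not part of the original file): the three na.last modes,
-# assuming an Array containing a missing value.
-#   a <- Array$create(c(2, NA, 1))
-#   sort(a)                  # na.last = NA drops the NA: 1, 2
-#   sort(a, na.last = TRUE)  # Arrow's native behavior: 1, 2, NA
-#   sort(a, na.last = FALSE) # via the helper-column workaround: NA, 1, 2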
diff --git a/r/R/arrow-package.R b/r/R/arrow-package.R
deleted file mode 100644
index 30d5949..0000000
--- a/r/R/arrow-package.R
+++ /dev/null
@@ -1,278 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @importFrom stats quantile median
-#' @importFrom R6 R6Class
-#' @importFrom purrr as_mapper map map2 map_chr map_dfr map_int map_lgl keep
-#' @importFrom assertthat assert_that is.string
-#' @importFrom rlang list2 %||% is_false abort dots_n warn enquo quo_is_null enquos is_integerish quos eval_tidy new_data_mask syms env new_environment env_bind as_label set_names exec is_bare_character quo_get_expr quo_set_expr .data seq2 is_quosure enexpr enexprs expr
-#' @importFrom tidyselect vars_pull vars_rename vars_select eval_select
-#' @useDynLib arrow, .registration = TRUE
-#' @keywords internal
-"_PACKAGE"
-
-#' @importFrom vctrs s3_register vec_size vec_cast vec_unique
-.onLoad <- function(...) {
-  dplyr_methods <- paste0(
-    "dplyr::",
-    c(
-      "select", "filter", "collect", "summarise", "group_by", "groups",
-      "group_vars", "group_by_drop_default", "ungroup", "mutate", "transmute",
-      "arrange", "rename", "pull", "relocate", "compute"
-    )
-  )
-  for (cl in c("Dataset", "ArrowTabular", "arrow_dplyr_query")) {
-    for (m in dplyr_methods) {
-      s3_register(m, cl)
-    }
-  }
-  s3_register("dplyr::tbl_vars", "arrow_dplyr_query")
-
-  for (cl in c("Array", "RecordBatch", "ChunkedArray", "Table", "Schema")) {
-    s3_register("reticulate::py_to_r", paste0("pyarrow.lib.", cl))
-    s3_register("reticulate::r_to_py", cl)
-  }
-
-  # Create these once, at package build time
-  if (arrow_available()) {
-    dplyr_functions$dataset <- build_function_list(build_dataset_expression)
-    dplyr_functions$array <- build_function_list(build_array_expression)
-  }
-  invisible()
-}
-
-.onAttach <- function(libname, pkgname) {
-  if (!arrow_available()) {
-    msg <- paste(
-      "The Arrow C++ library is not available. To retry installation with debug output, run:",
-      "    install_arrow(verbose = TRUE)",
-      "See https://arrow.apache.org/docs/r/articles/install.html for more guidance and troubleshooting.",
-      sep = "\n"
-    )
-    packageStartupMessage(msg)
-  } else {
-    # Just to be extra safe, let's wrap this in a try();
-    # we don't want a failed startup message to prevent the package from loading
-    try({
-      features <- arrow_info()$capabilities
-      # That has all of the #ifdef features, plus the compression libs and the
-      # string libraries (but not the memory allocators, which are added elsewhere)
-      #
-      # Let's print a message if some are off
-      if (some_features_are_off(features)) {
-        packageStartupMessage("See arrow_info() for available features")
-      }
-    })
-  }
-}
-
-#' Is the C++ Arrow library available?
-#'
-#' You won't generally need to call these functions, but they're made available
-#' for diagnostic purposes.
-#' @return `TRUE` or `FALSE` depending on whether the package was installed
-#' with:
-#' * The Arrow C++ library (check with `arrow_available()`)
-#' * Arrow Dataset support enabled (check with `arrow_with_dataset()`)
-#' * Parquet support enabled (check with `arrow_with_parquet()`)
-#' * Amazon S3 support enabled (check with `arrow_with_s3()`)
-#' @export
-#' @examples
-#' arrow_available()
-#' arrow_with_dataset()
-#' arrow_with_parquet()
-#' arrow_with_s3()
-#' @seealso If any of these are `FALSE`, see
-#' `vignette("install", package = "arrow")` for guidance on reinstalling the
-#' package.
-arrow_available <- function() {
-  tryCatch(.Call(`_arrow_available`), error = function(e) return(FALSE))
-}
-
-#' @rdname arrow_available
-#' @export
-arrow_with_dataset <- function() {
-  tryCatch(.Call(`_dataset_available`), error = function(e) return(FALSE))
-}
-
-#' @rdname arrow_available
-#' @export
-arrow_with_parquet <- function() {
-  tryCatch(.Call(`_parquet_available`), error = function(e) return(FALSE))
-}
-
-#' @rdname arrow_available
-#' @export
-arrow_with_s3 <- function() {
-  tryCatch(.Call(`_s3_available`), error = function(e) return(FALSE))
-}
-
-option_use_threads <- function() {
-  !is_false(getOption("arrow.use_threads"))
-}
-
-#' Report information on the package's capabilities
-#'
-#' This function summarizes a number of build-time configurations and run-time
-#' settings for the Arrow package. It may be useful for diagnostics.
-#' @return A list including version information, boolean "capabilities",
-#' statistics from Arrow's memory allocator, and Arrow's run-time
-#' information.
-#' @export
-#' @importFrom utils packageVersion
-arrow_info <- function() {
-  opts <- options()
-  out <- list(
-    version = packageVersion("arrow"),
-    libarrow = arrow_available(),
-    options = opts[grep("^arrow\\.", names(opts))]
-  )
-  if (out$libarrow) {
-    pool <- default_memory_pool()
-    runtimeinfo <- runtime_info()
-    compute_funcs <- list_compute_functions()
-    out <- c(out, list(
-      capabilities = c(
-        dataset = arrow_with_dataset(),
-        parquet = arrow_with_parquet(),
-        s3 = arrow_with_s3(),
-        utf8proc = "utf8_upper" %in% compute_funcs,
-        re2 = "replace_substring_regex" %in% compute_funcs,
-        vapply(tolower(names(CompressionType)[-1]), codec_is_available, logical(1))
-      ),
-      memory_pool = list(
-        backend_name = pool$backend_name,
-        bytes_allocated = pool$bytes_allocated,
-        max_memory = pool$max_memory,
-        available_backends = supported_memory_backends()
-      ),
-      runtime_info = list(
-        simd_level = runtimeinfo[1],
-        detected_simd_level = runtimeinfo[2]
-      )
-    ))
-  }
-  structure(out, class = "arrow_info")
-}
-
-some_features_are_off <- function(features) {
-  # `features` is a named logical vector (as in arrow_info()$capabilities)
-  # Let's exclude some less relevant ones
-  blocklist <- c("lzo", "bz2", "brotli")
-  # Return TRUE if any of the other features are FALSE
-  !all(features[setdiff(names(features), blocklist)])
-}
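-
-# Illustrative sketch (not part of the original file):
-#   some_features_are_off(c(parquet = TRUE, s3 = FALSE, lzo = FALSE))
-#   # TRUE: s3 is off; lzo is ignored because it is on the blocklist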
-
-#' @export
-print.arrow_info <- function(x, ...) {
-  print_key_values <- function(title, vals, ...) {
-    # Make a key-value table for printing, no column names
-    df <- data.frame(vals, stringsAsFactors = FALSE, ...)
-    names(df) <- ""
-
-    cat(title, ":\n", sep = "")
-    print(df)
-    cat("\n")
-  }
-  cat("Arrow package version: ", format(x$version), "\n\n", sep = "")
-  if (x$libarrow) {
-    print_key_values("Capabilities", c(
-      x$capabilities,
-      jemalloc = "jemalloc" %in% x$memory_pool$available_backends,
-      mimalloc = "mimalloc" %in% x$memory_pool$available_backends
-    ))
-    if (some_features_are_off(x$capabilities) && identical(tolower(Sys.info()[["sysname"]]), "linux")) {
-      # Only on Linux, because we deliberately disable certain features on other platforms (e.g. rtools35 and Solaris)
-      cat("To reinstall with more optional capabilities enabled, see\n  https://arrow.apache.org/docs/r/articles/install.html\n\n")
-    }
-
-    if (length(x$options)) {
-      print_key_values("Arrow options()", map_chr(x$options, format))
-    }
-
-    format_bytes <- function(b, units = "auto", digits = 2L, ...) {
-      format(structure(b, class = "object_size"), units = units, digits = digits, ...)
-    }
-    print_key_values("Memory", c(
-      Allocator = x$memory_pool$backend_name,
-      # utils:::format.object_size is not properly vectorized
-      Current = format_bytes(x$memory_pool$bytes_allocated, ...),
-      Max = format_bytes(x$memory_pool$max_memory, ...)
-    ))
-    print_key_values("Runtime", c(
-      `SIMD Level` = x$runtime_info$simd_level,
-      `Detected SIMD Level` = x$runtime_info$detected_simd_level
-    ))
-  } else {
-    cat("Arrow C++ library not available. See https://arrow.apache.org/docs/r/articles/install.html for troubleshooting.\n")
-  }
-  invisible(x)
-}
-
-option_compress_metadata <- function() {
-  !is_false(getOption("arrow.compress_metadata"))
-}
-
-#' @include enums.R
-ArrowObject <- R6Class("ArrowObject",
-  public = list(
-    initialize = function(xp) self$set_pointer(xp),
-
-    pointer = function() get(".:xp:.", envir = self),
-    `.:xp:.` = NULL,
-    set_pointer = function(xp) {
-      if (!inherits(xp, "externalptr")) {
-        stop(
-          class(self)[1], "$new() requires a pointer as input: ",
-          "did you mean $create() instead?",
-          call. = FALSE
-        )
-      }
-      assign(".:xp:.", xp, envir = self)
-    },
-    print = function(...) {
-      if (!is.null(self$.class_title)) {
-        # Allow subclasses to override just printing the class name first
-        class_title <- self$.class_title()
-      } else {
-        class_title <- class(self)[[1]]
-      }
-      cat(class_title, "\n", sep = "")
-      if (!is.null(self$ToString)) {
-        cat(self$ToString(), "\n", sep = "")
-      }
-      invisible(self)
-    },
-
-    invalidate = function() {
-      assign(".:xp:.", NULL, envir = self)
-    }
-  )
-)
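-
-# Illustrative sketch (not part of the original file): $new() expects an
-# external pointer produced by the C++ side; user code goes through $create().
-#   Array$new(1:3)     # errors: "did you mean $create() instead?"
-#   Array$create(1:3)  # the supported construction path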
-
-#' @export
-`!=.ArrowObject` <- function(lhs, rhs) !(lhs == rhs)
-
-#' @export
-`==.ArrowObject` <- function(x, y) {
-  x$Equals(y)
-}
-
-#' @export
-all.equal.ArrowObject <- function(target, current, ..., check.attributes = TRUE) {
-  target$Equals(current, check_metadata = check.attributes)
-}
diff --git a/r/R/arrow-tabular.R b/r/R/arrow-tabular.R
deleted file mode 100644
index f321116..0000000
--- a/r/R/arrow-tabular.R
+++ /dev/null
@@ -1,220 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-datum.R
-
-# Base class for RecordBatch and Table, for S3 method dispatch only.
-# It does not exist in the C++ class hierarchy.
-ArrowTabular <- R6Class("ArrowTabular", inherit = ArrowObject,
-  public = list(
-    ToString = function() ToString_tabular(self),
-    Take = function(i) {
-      if (is.numeric(i)) {
-        i <- as.integer(i)
-      }
-      if (is.integer(i)) {
-        i <- Array$create(i)
-      }
-      assert_that(is.Array(i))
-      call_function("take", self, i)
-    },
-    Filter = function(i, keep_na = TRUE) {
-      if (is.logical(i)) {
-        i <- Array$create(i)
-      }
-      assert_that(is.Array(i, "bool"))
-      call_function("filter", self, i, options = list(keep_na = keep_na))
-    },
-    SortIndices = function(names, descending = FALSE) {
-      assert_that(is.character(names))
-      assert_that(length(names) > 0)
-      assert_that(!any(is.na(names)))
-      if (length(descending) == 1L) {
-        descending <- rep_len(descending, length(names))
-      }
-      assert_that(is.logical(descending))
-      assert_that(identical(length(names), length(descending)))
-      assert_that(!any(is.na(descending)))
-      call_function(
-        "sort_indices",
-        self,
-        # cpp11 does not support logical vectors so convert to integer
-        options = list(names = names, orders = as.integer(descending))
-      )
-    }
-  )
-)
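-
-# Illustrative sketch (not part of the original file): multi-key sorting via
-# SortIndices() combined with Take().
-#   tbl <- Table$create(g = c(1, 1, 2), v = c(1, 3, 2))
-#   idx <- tbl$SortIndices(names = c("g", "v"), descending = c(FALSE, TRUE))
-#   tbl$Take(idx)  # g ascending, v descending within g: rows (1,3), (1,1), (2,2)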
-
-#' @export
-as.data.frame.ArrowTabular <- function(x, row.names = NULL, optional = FALSE, ...) {
-  tryCatch(
-    df <- x$to_data_frame(),
-    error = handle_embedded_nul_error
-  )
-  if (!is.null(r_metadata <- x$metadata$r)) {
-    df <- apply_arrow_r_metadata(df, .unserialize_arrow_r_metadata(r_metadata))
-  }
-  df
-}
-
-#' @export
-`names<-.ArrowTabular` <- function(x, value) x$RenameColumns(value)
-
-#' @importFrom methods as
-#' @export
-`[.ArrowTabular` <- function(x, i, j, ..., drop = FALSE) {
-  if (nargs() == 2L) {
-    # List-like column extraction (x[i])
-    return(x[, i])
-  }
-  if (!missing(j)) {
-    # Selecting columns is cheaper than filtering rows, so do it first.
-    # That way, if we're filtering too, we have fewer arrays to filter/slice/take
-    if (is.character(j)) {
-      j_new <- match(j, names(x))
-      if (any(is.na(j_new))) {
-        stop("Column not found: ", oxford_paste(j[is.na(j_new)]), call. = FALSE)
-      }
-      j <- j_new
-    }
-    if (is_integerish(j)) {
-      if (any(is.na(j))) {
-        stop("Column indices cannot be NA", call. = FALSE)
-      }
-      if (length(j) && all(j < 0)) {
-        # in R, negative j means "everything but j"
-        j <- setdiff(seq_len(x$num_columns), -1 * j)
-      }
-      x <- x$SelectColumns(as.integer(j) - 1L)
-    }
-
-    if (drop && ncol(x) == 1L) {
-      x <- x$column(0)
-    }
-  }
-  if (!missing(i)) {
-    x <- filter_rows(x, i, ...)
-  }
-  x
-}
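-
-# Illustrative sketch (not part of the original file): data.frame-like
-# two-index subsetting on a Table.
-#   tbl <- Table$create(x = 1:4, y = letters[1:4])
-#   tbl[1:2, "y"]          # rows 1-2 of column y, still a Table
-#   tbl["x"]               # single-index form selects columns, like a list
-#   tbl[, 1, drop = TRUE]  # drops to a ChunkedArray once one column remains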
-
-#' @export
-`[[.ArrowTabular` <- function(x, i, ...) {
-  if (is.character(i)) {
-    x$GetColumnByName(i)
-  } else if (is.numeric(i)) {
-    x$column(i - 1)
-  } else {
-    stop("'i' must be character or numeric, not ", class(i), call. = FALSE)
-  }
-}
-
-#' @export
-`$.ArrowTabular` <- function(x, name, ...) {
-  assert_that(is.string(name))
-  if (name %in% ls(x)) {
-    get(name, x)
-  } else {
-    x$GetColumnByName(name)
-  }
-}
-
-#' @export
-`[[<-.ArrowTabular` <- function(x, i, value) {
-  if (!is.character(i) && !is.numeric(i)) {
-    stop("'i' must be character or numeric, not ", class(i), call. = FALSE)
-  }
-  assert_that(length(i) == 1, !is.na(i))
-
-  if (is.null(value)) {
-    if (is.character(i)) {
-      i <- match(i, names(x))
-    }
-    x <- x$RemoveColumn(i - 1L)
-  } else {
-    if (!is.character(i)) {
-      # get or create a/the column name
-      if (i <= x$num_columns) {
-        i <- names(x)[i]
-      } else {
-        i <- as.character(i)
-      }
-    }
-
-    # auto-magic recycling on non-ArrowObjects
-    if (!inherits(value, "ArrowObject")) {
-      value <- vctrs::vec_recycle(value, x$num_rows)
-    }
-
-    # construct the field
-    if (inherits(x, "RecordBatch") && !inherits(value, "Array")) {
-      value <- Array$create(value)
-    } else if (inherits(x, "Table") && !inherits(value, "ChunkedArray")) {
-      value <- ChunkedArray$create(value)
-    }
-    new_field <- field(i, value$type)
-
-    if (i %in% names(x)) {
-      i <- match(i, names(x)) - 1L
-      x <- x$SetColumn(i, new_field, value)
-    } else {
-      i <- x$num_columns
-      x <- x$AddColumn(i, new_field, value)
-    }
-  }
-  x
-}
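-
-# Illustrative sketch (not part of the original file): column assignment with
-# recycling, and removal via NULL.
-#   tbl <- Table$create(x = 1:3)
-#   tbl[["y"]] <- 0    # length-1 value is recycled to num_rows, then appended
-#   tbl[["x"]] <- NULL # NULL removes the column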
-
-#' @export
-`$<-.ArrowTabular` <- function(x, i, value) {
-  assert_that(is.string(i))
-  # We need to check whether `i` is in names in case it is an active binding
-  # (e.g. `metadata`); if so, we use assign() to change the active binding
-  # instead of a column in the table
-  if (i %in% ls(x)) {
-    assign(i, value, x)
-  } else {
-    x[[i]] <- value
-  }
-  x
-}
-
-#' @export
-dim.ArrowTabular <- function(x) c(x$num_rows, x$num_columns)
-
-#' @export
-as.list.ArrowTabular <- function(x, ...) as.list(as.data.frame(x, ...))
-
-#' @export
-row.names.ArrowTabular <- function(x) as.character(seq_len(nrow(x)))
-
-#' @export
-dimnames.ArrowTabular <- function(x) list(row.names(x), names(x))
-
-#' @export
-head.ArrowTabular <- head.ArrowDatum
-
-#' @export
-tail.ArrowTabular <- tail.ArrowDatum
-
-ToString_tabular <- function(x, ...) {
-  # Generic to work with both RecordBatch and Table
-  sch <- unlist(strsplit(x$schema$ToString(), "\n"))
-  sch <- sub("(.*): (.*)", "$\\1 <\\2>", sch)
-  dims <- sprintf("%s rows x %s columns", nrow(x), ncol(x))
-  paste(c(dims, sch), collapse = "\n")
-}
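-
-# Illustrative sketch (not part of the original file): roughly what printing a
-# two-column RecordBatch produces via ToString_tabular().
-#   batch <- record_batch(x = 1:2, y = c("a", "b"))
-#   print(batch)
-#   # RecordBatch
-#   # 2 rows x 2 columns
-#   # $x <int32>
-#   # $y <string>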
diff --git a/r/R/arrowExports.R b/r/R/arrowExports.R
deleted file mode 100644
index 51cdcf8..0000000
--- a/r/R/arrowExports.R
+++ /dev/null
@@ -1,1668 +0,0 @@
-# Generated by data-raw/codegen.R -> do not edit by hand
-
-Array__Slice1 <- function(array, offset){
-    .Call(`_arrow_Array__Slice1`, array, offset)
-}
-
-Array__Slice2 <- function(array, offset, length){
-    .Call(`_arrow_Array__Slice2`, array, offset, length)
-}
-
-Array__IsNull <- function(x, i){
-    .Call(`_arrow_Array__IsNull`, x, i)
-}
-
-Array__IsValid <- function(x, i){
-    .Call(`_arrow_Array__IsValid`, x, i)
-}
-
-Array__length <- function(x){
-    .Call(`_arrow_Array__length`, x)
-}
-
-Array__offset <- function(x){
-    .Call(`_arrow_Array__offset`, x)
-}
-
-Array__null_count <- function(x){
-    .Call(`_arrow_Array__null_count`, x)
-}
-
-Array__type <- function(x){
-    .Call(`_arrow_Array__type`, x)
-}
-
-Array__ToString <- function(x){
-    .Call(`_arrow_Array__ToString`, x)
-}
-
-Array__type_id <- function(x){
-    .Call(`_arrow_Array__type_id`, x)
-}
-
-Array__Equals <- function(lhs, rhs){
-    .Call(`_arrow_Array__Equals`, lhs, rhs)
-}
-
-Array__ApproxEquals <- function(lhs, rhs){
-    .Call(`_arrow_Array__ApproxEquals`, lhs, rhs)
-}
-
-Array__Diff <- function(lhs, rhs){
-    .Call(`_arrow_Array__Diff`, lhs, rhs)
-}
-
-Array__data <- function(array){
-    .Call(`_arrow_Array__data`, array)
-}
-
-Array__RangeEquals <- function(self, other, start_idx, end_idx, other_start_idx){
-    .Call(`_arrow_Array__RangeEquals`, self, other, start_idx, end_idx, other_start_idx)
-}
-
-Array__View <- function(array, type){
-    .Call(`_arrow_Array__View`, array, type)
-}
-
-Array__Validate <- function(array){
-    invisible(.Call(`_arrow_Array__Validate`, array))
-}
-
-DictionaryArray__indices <- function(array){
-    .Call(`_arrow_DictionaryArray__indices`, array)
-}
-
-DictionaryArray__dictionary <- function(array){
-    .Call(`_arrow_DictionaryArray__dictionary`, array)
-}
-
-StructArray__field <- function(array, i){
-    .Call(`_arrow_StructArray__field`, array, i)
-}
-
-StructArray__GetFieldByName <- function(array, name){
-    .Call(`_arrow_StructArray__GetFieldByName`, array, name)
-}
-
-StructArray__Flatten <- function(array){
-    .Call(`_arrow_StructArray__Flatten`, array)
-}
-
-ListArray__value_type <- function(array){
-    .Call(`_arrow_ListArray__value_type`, array)
-}
-
-LargeListArray__value_type <- function(array){
-    .Call(`_arrow_LargeListArray__value_type`, array)
-}
-
-ListArray__values <- function(array){
-    .Call(`_arrow_ListArray__values`, array)
-}
-
-LargeListArray__values <- function(array){
-    .Call(`_arrow_LargeListArray__values`, array)
-}
-
-ListArray__value_length <- function(array, i){
-    .Call(`_arrow_ListArray__value_length`, array, i)
-}
-
-LargeListArray__value_length <- function(array, i){
-    .Call(`_arrow_LargeListArray__value_length`, array, i)
-}
-
-FixedSizeListArray__value_length <- function(array, i){
-    .Call(`_arrow_FixedSizeListArray__value_length`, array, i)
-}
-
-ListArray__value_offset <- function(array, i){
-    .Call(`_arrow_ListArray__value_offset`, array, i)
-}
-
-LargeListArray__value_offset <- function(array, i){
-    .Call(`_arrow_LargeListArray__value_offset`, array, i)
-}
-
-FixedSizeListArray__value_offset <- function(array, i){
-    .Call(`_arrow_FixedSizeListArray__value_offset`, array, i)
-}
-
-ListArray__raw_value_offsets <- function(array){
-    .Call(`_arrow_ListArray__raw_value_offsets`, array)
-}
-
-LargeListArray__raw_value_offsets <- function(array){
-    .Call(`_arrow_LargeListArray__raw_value_offsets`, array)
-}
-
-Array__as_vector <- function(array){
-    .Call(`_arrow_Array__as_vector`, array)
-}
-
-ChunkedArray__as_vector <- function(chunked_array){
-    .Call(`_arrow_ChunkedArray__as_vector`, chunked_array)
-}
-
-RecordBatch__to_dataframe <- function(batch, use_threads){
-    .Call(`_arrow_RecordBatch__to_dataframe`, batch, use_threads)
-}
-
-Table__to_dataframe <- function(table, use_threads){
-    .Call(`_arrow_Table__to_dataframe`, table, use_threads)
-}
-
-ArrayData__get_type <- function(x){
-    .Call(`_arrow_ArrayData__get_type`, x)
-}
-
-ArrayData__get_length <- function(x){
-    .Call(`_arrow_ArrayData__get_length`, x)
-}
-
-ArrayData__get_null_count <- function(x){
-    .Call(`_arrow_ArrayData__get_null_count`, x)
-}
-
-ArrayData__get_offset <- function(x){
-    .Call(`_arrow_ArrayData__get_offset`, x)
-}
-
-ArrayData__buffers <- function(x){
-    .Call(`_arrow_ArrayData__buffers`, x)
-}
-
-Buffer__is_mutable <- function(buffer){
-    .Call(`_arrow_Buffer__is_mutable`, buffer)
-}
-
-Buffer__ZeroPadding <- function(buffer){
-    invisible(.Call(`_arrow_Buffer__ZeroPadding`, buffer))
-}
-
-Buffer__capacity <- function(buffer){
-    .Call(`_arrow_Buffer__capacity`, buffer)
-}
-
-Buffer__size <- function(buffer){
-    .Call(`_arrow_Buffer__size`, buffer)
-}
-
-r___RBuffer__initialize <- function(x){
-    .Call(`_arrow_r___RBuffer__initialize`, x)
-}
-
-Buffer__data <- function(buffer){
-    .Call(`_arrow_Buffer__data`, buffer)
-}
-
-Buffer__Equals <- function(x, y){
-    .Call(`_arrow_Buffer__Equals`, x, y)
-}
-
-ChunkedArray__length <- function(chunked_array){
-    .Call(`_arrow_ChunkedArray__length`, chunked_array)
-}
-
-ChunkedArray__null_count <- function(chunked_array){
-    .Call(`_arrow_ChunkedArray__null_count`, chunked_array)
-}
-
-ChunkedArray__num_chunks <- function(chunked_array){
-    .Call(`_arrow_ChunkedArray__num_chunks`, chunked_array)
-}
-
-ChunkedArray__chunk <- function(chunked_array, i){
-    .Call(`_arrow_ChunkedArray__chunk`, chunked_array, i)
-}
-
-ChunkedArray__chunks <- function(chunked_array){
-    .Call(`_arrow_ChunkedArray__chunks`, chunked_array)
-}
-
-ChunkedArray__type <- function(chunked_array){
-    .Call(`_arrow_ChunkedArray__type`, chunked_array)
-}
-
-ChunkedArray__Slice1 <- function(chunked_array, offset){
-    .Call(`_arrow_ChunkedArray__Slice1`, chunked_array, offset)
-}
-
-ChunkedArray__Slice2 <- function(chunked_array, offset, length){
-    .Call(`_arrow_ChunkedArray__Slice2`, chunked_array, offset, length)
-}
-
-ChunkedArray__View <- function(array, type){
-    .Call(`_arrow_ChunkedArray__View`, array, type)
-}
-
-ChunkedArray__Validate <- function(chunked_array){
-    invisible(.Call(`_arrow_ChunkedArray__Validate`, chunked_array))
-}
-
-ChunkedArray__Equals <- function(x, y){
-    .Call(`_arrow_ChunkedArray__Equals`, x, y)
-}
-
-ChunkedArray__ToString <- function(x){
-    .Call(`_arrow_ChunkedArray__ToString`, x)
-}
-
-ChunkedArray__from_list <- function(chunks, s_type){
-    .Call(`_arrow_ChunkedArray__from_list`, chunks, s_type)
-}
-
-util___Codec__Create <- function(codec, compression_level){
-    .Call(`_arrow_util___Codec__Create`, codec, compression_level)
-}
-
-util___Codec__name <- function(codec){
-    .Call(`_arrow_util___Codec__name`, codec)
-}
-
-util___Codec__IsAvailable <- function(codec){
-    .Call(`_arrow_util___Codec__IsAvailable`, codec)
-}
-
-io___CompressedOutputStream__Make <- function(codec, raw){
-    .Call(`_arrow_io___CompressedOutputStream__Make`, codec, raw)
-}
-
-io___CompressedInputStream__Make <- function(codec, raw){
-    .Call(`_arrow_io___CompressedInputStream__Make`, codec, raw)
-}
-
-RecordBatch__cast <- function(batch, schema, options){
-    .Call(`_arrow_RecordBatch__cast`, batch, schema, options)
-}
-
-Table__cast <- function(table, schema, options){
-    .Call(`_arrow_Table__cast`, table, schema, options)
-}
-
-compute__CallFunction <- function(func_name, args, options){
-    .Call(`_arrow_compute__CallFunction`, func_name, args, options)
-}
-
-compute__GroupBy <- function(arguments, keys, options){
-    .Call(`_arrow_compute__GroupBy`, arguments, keys, options)
-}
-
-compute__GetFunctionNames <- function(){
-    .Call(`_arrow_compute__GetFunctionNames`)
-}
-
-csv___ReadOptions__initialize <- function(options){
-    .Call(`_arrow_csv___ReadOptions__initialize`, options)
-}
-
-csv___ParseOptions__initialize <- function(options){
-    .Call(`_arrow_csv___ParseOptions__initialize`, options)
-}
-
-csv___ReadOptions__column_names <- function(options){
-    .Call(`_arrow_csv___ReadOptions__column_names`, options)
-}
-
-csv___ConvertOptions__initialize <- function(options){
-    .Call(`_arrow_csv___ConvertOptions__initialize`, options)
-}
-
-csv___TableReader__Make <- function(input, read_options, parse_options, convert_options){
-    .Call(`_arrow_csv___TableReader__Make`, input, read_options, parse_options, convert_options)
-}
-
-csv___TableReader__Read <- function(table_reader){
-    .Call(`_arrow_csv___TableReader__Read`, table_reader)
-}
-
-TimestampParser__kind <- function(parser){
-    .Call(`_arrow_TimestampParser__kind`, parser)
-}
-
-TimestampParser__format <- function(parser){
-    .Call(`_arrow_TimestampParser__format`, parser)
-}
-
-TimestampParser__MakeStrptime <- function(format){
-    .Call(`_arrow_TimestampParser__MakeStrptime`, format)
-}
-
-TimestampParser__MakeISO8601 <- function(){
-    .Call(`_arrow_TimestampParser__MakeISO8601`)
-}
-
-dataset___Dataset__NewScan <- function(ds){
-    .Call(`_arrow_dataset___Dataset__NewScan`, ds)
-}
-
-dataset___Dataset__schema <- function(dataset){
-    .Call(`_arrow_dataset___Dataset__schema`, dataset)
-}
-
-dataset___Dataset__type_name <- function(dataset){
-    .Call(`_arrow_dataset___Dataset__type_name`, dataset)
-}
-
-dataset___Dataset__ReplaceSchema <- function(dataset, schm){
-    .Call(`_arrow_dataset___Dataset__ReplaceSchema`, dataset, schm)
-}
-
-dataset___UnionDataset__create <- function(datasets, schm){
-    .Call(`_arrow_dataset___UnionDataset__create`, datasets, schm)
-}
-
-dataset___InMemoryDataset__create <- function(table){
-    .Call(`_arrow_dataset___InMemoryDataset__create`, table)
-}
-
-dataset___UnionDataset__children <- function(ds){
-    .Call(`_arrow_dataset___UnionDataset__children`, ds)
-}
-
-dataset___FileSystemDataset__format <- function(dataset){
-    .Call(`_arrow_dataset___FileSystemDataset__format`, dataset)
-}
-
-dataset___FileSystemDataset__filesystem <- function(dataset){
-    .Call(`_arrow_dataset___FileSystemDataset__filesystem`, dataset)
-}
-
-dataset___FileSystemDataset__files <- function(dataset){
-    .Call(`_arrow_dataset___FileSystemDataset__files`, dataset)
-}
-
-dataset___DatasetFactory__Finish1 <- function(factory, unify_schemas){
-    .Call(`_arrow_dataset___DatasetFactory__Finish1`, factory, unify_schemas)
-}
-
-dataset___DatasetFactory__Finish2 <- function(factory, schema){
-    .Call(`_arrow_dataset___DatasetFactory__Finish2`, factory, schema)
-}
-
-dataset___DatasetFactory__Inspect <- function(factory, unify_schemas){
-    .Call(`_arrow_dataset___DatasetFactory__Inspect`, factory, unify_schemas)
-}
-
-dataset___UnionDatasetFactory__Make <- function(children){
-    .Call(`_arrow_dataset___UnionDatasetFactory__Make`, children)
-}
-
-dataset___FileSystemDatasetFactory__Make0 <- function(fs, paths, format){
-    .Call(`_arrow_dataset___FileSystemDatasetFactory__Make0`, fs, paths, format)
-}
-
-dataset___FileSystemDatasetFactory__Make2 <- function(fs, selector, format, partitioning){
-    .Call(`_arrow_dataset___FileSystemDatasetFactory__Make2`, fs, selector, format, partitioning)
-}
-
-dataset___FileSystemDatasetFactory__Make1 <- function(fs, selector, format){
-    .Call(`_arrow_dataset___FileSystemDatasetFactory__Make1`, fs, selector, format)
-}
-
-dataset___FileSystemDatasetFactory__Make3 <- function(fs, selector, format, factory){
-    .Call(`_arrow_dataset___FileSystemDatasetFactory__Make3`, fs, selector, format, factory)
-}
-
-dataset___FileFormat__type_name <- function(format){
-    .Call(`_arrow_dataset___FileFormat__type_name`, format)
-}
-
-dataset___FileFormat__DefaultWriteOptions <- function(fmt){
-    .Call(`_arrow_dataset___FileFormat__DefaultWriteOptions`, fmt)
-}
-
-dataset___ParquetFileFormat__Make <- function(options, dict_columns){
-    .Call(`_arrow_dataset___ParquetFileFormat__Make`, options, dict_columns)
-}
-
-dataset___FileWriteOptions__type_name <- function(options){
-    .Call(`_arrow_dataset___FileWriteOptions__type_name`, options)
-}
-
-dataset___ParquetFileWriteOptions__update <- function(options, writer_props, arrow_writer_props){
-    invisible(.Call(`_arrow_dataset___ParquetFileWriteOptions__update`, options, writer_props, arrow_writer_props))
-}
-
-dataset___IpcFileWriteOptions__update2 <- function(ipc_options, use_legacy_format, codec, metadata_version){
-    invisible(.Call(`_arrow_dataset___IpcFileWriteOptions__update2`, ipc_options, use_legacy_format, codec, metadata_version))
-}
-
-dataset___IpcFileWriteOptions__update1 <- function(ipc_options, use_legacy_format, metadata_version){
-    invisible(.Call(`_arrow_dataset___IpcFileWriteOptions__update1`, ipc_options, use_legacy_format, metadata_version))
-}
-
-dataset___IpcFileFormat__Make <- function(){
-    .Call(`_arrow_dataset___IpcFileFormat__Make`)
-}
-
-dataset___CsvFileFormat__Make <- function(parse_options, convert_options, read_options){
-    .Call(`_arrow_dataset___CsvFileFormat__Make`, parse_options, convert_options, read_options)
-}
-
-dataset___FragmentScanOptions__type_name <- function(fragment_scan_options){
-    .Call(`_arrow_dataset___FragmentScanOptions__type_name`, fragment_scan_options)
-}
-
-dataset___CsvFragmentScanOptions__Make <- function(convert_options, read_options){
-    .Call(`_arrow_dataset___CsvFragmentScanOptions__Make`, convert_options, read_options)
-}
-
-dataset___ParquetFragmentScanOptions__Make <- function(use_buffered_stream, buffer_size, pre_buffer){
-    .Call(`_arrow_dataset___ParquetFragmentScanOptions__Make`, use_buffered_stream, buffer_size, pre_buffer)
-}
-
-dataset___DirectoryPartitioning <- function(schm){
-    .Call(`_arrow_dataset___DirectoryPartitioning`, schm)
-}
-
-dataset___DirectoryPartitioning__MakeFactory <- function(field_names){
-    .Call(`_arrow_dataset___DirectoryPartitioning__MakeFactory`, field_names)
-}
-
-dataset___HivePartitioning <- function(schm, null_fallback){
-    .Call(`_arrow_dataset___HivePartitioning`, schm, null_fallback)
-}
-
-dataset___HivePartitioning__MakeFactory <- function(null_fallback){
-    .Call(`_arrow_dataset___HivePartitioning__MakeFactory`, null_fallback)
-}
-
-dataset___ScannerBuilder__ProjectNames <- function(sb, cols){
-    invisible(.Call(`_arrow_dataset___ScannerBuilder__ProjectNames`, sb, cols))
-}
-
-dataset___ScannerBuilder__ProjectExprs <- function(sb, exprs, names){
-    invisible(.Call(`_arrow_dataset___ScannerBuilder__ProjectExprs`, sb, exprs, names))
-}
-
-dataset___ScannerBuilder__Filter <- function(sb, expr){
-    invisible(.Call(`_arrow_dataset___ScannerBuilder__Filter`, sb, expr))
-}
-
-dataset___ScannerBuilder__UseThreads <- function(sb, threads){
-    invisible(.Call(`_arrow_dataset___ScannerBuilder__UseThreads`, sb, threads))
-}
-
-dataset___ScannerBuilder__BatchSize <- function(sb, batch_size){
-    invisible(.Call(`_arrow_dataset___ScannerBuilder__BatchSize`, sb, batch_size))
-}
-
-dataset___ScannerBuilder__FragmentScanOptions <- function(sb, options){
-    invisible(.Call(`_arrow_dataset___ScannerBuilder__FragmentScanOptions`, sb, options))
-}
-
-dataset___ScannerBuilder__schema <- function(sb){
-    .Call(`_arrow_dataset___ScannerBuilder__schema`, sb)
-}
-
-dataset___ScannerBuilder__Finish <- function(sb){
-    .Call(`_arrow_dataset___ScannerBuilder__Finish`, sb)
-}
-
-dataset___Scanner__ToTable <- function(scanner){
-    .Call(`_arrow_dataset___Scanner__ToTable`, scanner)
-}
-
-dataset___Scanner__ScanBatches <- function(scanner){
-    .Call(`_arrow_dataset___Scanner__ScanBatches`, scanner)
-}
-
-dataset___Scanner__head <- function(scanner, n){
-    .Call(`_arrow_dataset___Scanner__head`, scanner, n)
-}
-
-dataset___Scanner__schema <- function(sc){
-    .Call(`_arrow_dataset___Scanner__schema`, sc)
-}
-
-dataset___ScanTask__get_batches <- function(scan_task){
-    .Call(`_arrow_dataset___ScanTask__get_batches`, scan_task)
-}
-
-dataset___Dataset__Write <- function(file_write_options, filesystem, base_dir, partitioning, basename_template, scanner){
-    invisible(.Call(`_arrow_dataset___Dataset__Write`, file_write_options, filesystem, base_dir, partitioning, basename_template, scanner))
-}
-
-dataset___Scanner__TakeRows <- function(scanner, indices){
-    .Call(`_arrow_dataset___Scanner__TakeRows`, scanner, indices)
-}
-
-Int8__initialize <- function(){
-    .Call(`_arrow_Int8__initialize`)
-}
-
-Int16__initialize <- function(){
-    .Call(`_arrow_Int16__initialize`)
-}
-
-Int32__initialize <- function(){
-    .Call(`_arrow_Int32__initialize`)
-}
-
-Int64__initialize <- function(){
-    .Call(`_arrow_Int64__initialize`)
-}
-
-UInt8__initialize <- function(){
-    .Call(`_arrow_UInt8__initialize`)
-}
-
-UInt16__initialize <- function(){
-    .Call(`_arrow_UInt16__initialize`)
-}
-
-UInt32__initialize <- function(){
-    .Call(`_arrow_UInt32__initialize`)
-}
-
-UInt64__initialize <- function(){
-    .Call(`_arrow_UInt64__initialize`)
-}
-
-Float16__initialize <- function(){
-    .Call(`_arrow_Float16__initialize`)
-}
-
-Float32__initialize <- function(){
-    .Call(`_arrow_Float32__initialize`)
-}
-
-Float64__initialize <- function(){
-    .Call(`_arrow_Float64__initialize`)
-}
-
-Boolean__initialize <- function(){
-    .Call(`_arrow_Boolean__initialize`)
-}
-
-Utf8__initialize <- function(){
-    .Call(`_arrow_Utf8__initialize`)
-}
-
-LargeUtf8__initialize <- function(){
-    .Call(`_arrow_LargeUtf8__initialize`)
-}
-
-Binary__initialize <- function(){
-    .Call(`_arrow_Binary__initialize`)
-}
-
-LargeBinary__initialize <- function(){
-    .Call(`_arrow_LargeBinary__initialize`)
-}
-
-Date32__initialize <- function(){
-    .Call(`_arrow_Date32__initialize`)
-}
-
-Date64__initialize <- function(){
-    .Call(`_arrow_Date64__initialize`)
-}
-
-Null__initialize <- function(){
-    .Call(`_arrow_Null__initialize`)
-}
-
-Decimal128Type__initialize <- function(precision, scale){
-    .Call(`_arrow_Decimal128Type__initialize`, precision, scale)
-}
-
-FixedSizeBinary__initialize <- function(byte_width){
-    .Call(`_arrow_FixedSizeBinary__initialize`, byte_width)
-}
-
-Timestamp__initialize <- function(unit, timezone){
-    .Call(`_arrow_Timestamp__initialize`, unit, timezone)
-}
-
-Time32__initialize <- function(unit){
-    .Call(`_arrow_Time32__initialize`, unit)
-}
-
-Time64__initialize <- function(unit){
-    .Call(`_arrow_Time64__initialize`, unit)
-}
-
-list__ <- function(x){
-    .Call(`_arrow_list__`, x)
-}
-
-large_list__ <- function(x){
-    .Call(`_arrow_large_list__`, x)
-}
-
-fixed_size_list__ <- function(x, list_size){
-    .Call(`_arrow_fixed_size_list__`, x, list_size)
-}
-
-struct__ <- function(fields){
-    .Call(`_arrow_struct__`, fields)
-}
-
-DataType__ToString <- function(type){
-    .Call(`_arrow_DataType__ToString`, type)
-}
-
-DataType__name <- function(type){
-    .Call(`_arrow_DataType__name`, type)
-}
-
-DataType__Equals <- function(lhs, rhs){
-    .Call(`_arrow_DataType__Equals`, lhs, rhs)
-}
-
-DataType__num_fields <- function(type){
-    .Call(`_arrow_DataType__num_fields`, type)
-}
-
-DataType__fields <- function(type){
-    .Call(`_arrow_DataType__fields`, type)
-}
-
-DataType__id <- function(type){
-    .Call(`_arrow_DataType__id`, type)
-}
-
-ListType__ToString <- function(type){
-    .Call(`_arrow_ListType__ToString`, type)
-}
-
-FixedWidthType__bit_width <- function(type){
-    .Call(`_arrow_FixedWidthType__bit_width`, type)
-}
-
-DateType__unit <- function(type){
-    .Call(`_arrow_DateType__unit`, type)
-}
-
-TimeType__unit <- function(type){
-    .Call(`_arrow_TimeType__unit`, type)
-}
-
-DecimalType__precision <- function(type){
-    .Call(`_arrow_DecimalType__precision`, type)
-}
-
-DecimalType__scale <- function(type){
-    .Call(`_arrow_DecimalType__scale`, type)
-}
-
-TimestampType__timezone <- function(type){
-    .Call(`_arrow_TimestampType__timezone`, type)
-}
-
-TimestampType__unit <- function(type){
-    .Call(`_arrow_TimestampType__unit`, type)
-}
-
-DictionaryType__initialize <- function(index_type, value_type, ordered){
-    .Call(`_arrow_DictionaryType__initialize`, index_type, value_type, ordered)
-}
-
-DictionaryType__index_type <- function(type){
-    .Call(`_arrow_DictionaryType__index_type`, type)
-}
-
-DictionaryType__value_type <- function(type){
-    .Call(`_arrow_DictionaryType__value_type`, type)
-}
-
-DictionaryType__name <- function(type){
-    .Call(`_arrow_DictionaryType__name`, type)
-}
-
-DictionaryType__ordered <- function(type){
-    .Call(`_arrow_DictionaryType__ordered`, type)
-}
-
-StructType__GetFieldByName <- function(type, name){
-    .Call(`_arrow_StructType__GetFieldByName`, type, name)
-}
-
-StructType__GetFieldIndex <- function(type, name){
-    .Call(`_arrow_StructType__GetFieldIndex`, type, name)
-}
-
-StructType__field_names <- function(type){
-    .Call(`_arrow_StructType__field_names`, type)
-}
-
-ListType__value_field <- function(type){
-    .Call(`_arrow_ListType__value_field`, type)
-}
-
-ListType__value_type <- function(type){
-    .Call(`_arrow_ListType__value_type`, type)
-}
-
-LargeListType__value_field <- function(type){
-    .Call(`_arrow_LargeListType__value_field`, type)
-}
-
-LargeListType__value_type <- function(type){
-    .Call(`_arrow_LargeListType__value_type`, type)
-}
-
-FixedSizeListType__value_field <- function(type){
-    .Call(`_arrow_FixedSizeListType__value_field`, type)
-}
-
-FixedSizeListType__value_type <- function(type){
-    .Call(`_arrow_FixedSizeListType__value_type`, type)
-}
-
-FixedSizeListType__list_size <- function(type){
-    .Call(`_arrow_FixedSizeListType__list_size`, type)
-}
-
-dataset___expr__call <- function(func_name, argument_list, options){
-    .Call(`_arrow_dataset___expr__call`, func_name, argument_list, options)
-}
-
-dataset___expr__field_ref <- function(name){
-    .Call(`_arrow_dataset___expr__field_ref`, name)
-}
-
-dataset___expr__get_field_ref_name <- function(ref){
-    .Call(`_arrow_dataset___expr__get_field_ref_name`, ref)
-}
-
-dataset___expr__scalar <- function(x){
-    .Call(`_arrow_dataset___expr__scalar`, x)
-}
-
-dataset___expr__ToString <- function(x){
-    .Call(`_arrow_dataset___expr__ToString`, x)
-}
-
-ipc___WriteFeather__Table <- function(stream, table, version, chunk_size, compression, compression_level){
-    invisible(.Call(`_arrow_ipc___WriteFeather__Table`, stream, table, version, chunk_size, compression, compression_level))
-}
-
-ipc___feather___Reader__version <- function(reader){
-    .Call(`_arrow_ipc___feather___Reader__version`, reader)
-}
-
-ipc___feather___Reader__Read <- function(reader, columns){
-    .Call(`_arrow_ipc___feather___Reader__Read`, reader, columns)
-}
-
-ipc___feather___Reader__Open <- function(stream){
-    .Call(`_arrow_ipc___feather___Reader__Open`, stream)
-}
-
-ipc___feather___Reader__schema <- function(reader){
-    .Call(`_arrow_ipc___feather___Reader__schema`, reader)
-}
-
-Field__initialize <- function(name, field, nullable){
-    .Call(`_arrow_Field__initialize`, name, field, nullable)
-}
-
-Field__ToString <- function(field){
-    .Call(`_arrow_Field__ToString`, field)
-}
-
-Field__name <- function(field){
-    .Call(`_arrow_Field__name`, field)
-}
-
-Field__Equals <- function(field, other){
-    .Call(`_arrow_Field__Equals`, field, other)
-}
-
-Field__nullable <- function(field){
-    .Call(`_arrow_Field__nullable`, field)
-}
-
-Field__type <- function(field){
-    .Call(`_arrow_Field__type`, field)
-}
-
-fs___FileInfo__type <- function(x){
-    .Call(`_arrow_fs___FileInfo__type`, x)
-}
-
-fs___FileInfo__set_type <- function(x, type){
-    invisible(.Call(`_arrow_fs___FileInfo__set_type`, x, type))
-}
-
-fs___FileInfo__path <- function(x){
-    .Call(`_arrow_fs___FileInfo__path`, x)
-}
-
-fs___FileInfo__set_path <- function(x, path){
-    invisible(.Call(`_arrow_fs___FileInfo__set_path`, x, path))
-}
-
-fs___FileInfo__size <- function(x){
-    .Call(`_arrow_fs___FileInfo__size`, x)
-}
-
-fs___FileInfo__set_size <- function(x, size){
-    invisible(.Call(`_arrow_fs___FileInfo__set_size`, x, size))
-}
-
-fs___FileInfo__base_name <- function(x){
-    .Call(`_arrow_fs___FileInfo__base_name`, x)
-}
-
-fs___FileInfo__extension <- function(x){
-    .Call(`_arrow_fs___FileInfo__extension`, x)
-}
-
-fs___FileInfo__mtime <- function(x){
-    .Call(`_arrow_fs___FileInfo__mtime`, x)
-}
-
-fs___FileInfo__set_mtime <- function(x, time){
-    invisible(.Call(`_arrow_fs___FileInfo__set_mtime`, x, time))
-}
-
-fs___FileSelector__base_dir <- function(selector){
-    .Call(`_arrow_fs___FileSelector__base_dir`, selector)
-}
-
-fs___FileSelector__allow_not_found <- function(selector){
-    .Call(`_arrow_fs___FileSelector__allow_not_found`, selector)
-}
-
-fs___FileSelector__recursive <- function(selector){
-    .Call(`_arrow_fs___FileSelector__recursive`, selector)
-}
-
-fs___FileSelector__create <- function(base_dir, allow_not_found, recursive){
-    .Call(`_arrow_fs___FileSelector__create`, base_dir, allow_not_found, recursive)
-}
-
-fs___FileSystem__GetTargetInfos_Paths <- function(file_system, paths){
-    .Call(`_arrow_fs___FileSystem__GetTargetInfos_Paths`, file_system, paths)
-}
-
-fs___FileSystem__GetTargetInfos_FileSelector <- function(file_system, selector){
-    .Call(`_arrow_fs___FileSystem__GetTargetInfos_FileSelector`, file_system, selector)
-}
-
-fs___FileSystem__CreateDir <- function(file_system, path, recursive){
-    invisible(.Call(`_arrow_fs___FileSystem__CreateDir`, file_system, path, recursive))
-}
-
-fs___FileSystem__DeleteDir <- function(file_system, path){
-    invisible(.Call(`_arrow_fs___FileSystem__DeleteDir`, file_system, path))
-}
-
-fs___FileSystem__DeleteDirContents <- function(file_system, path){
-    invisible(.Call(`_arrow_fs___FileSystem__DeleteDirContents`, file_system, path))
-}
-
-fs___FileSystem__DeleteFile <- function(file_system, path){
-    invisible(.Call(`_arrow_fs___FileSystem__DeleteFile`, file_system, path))
-}
-
-fs___FileSystem__DeleteFiles <- function(file_system, paths){
-    invisible(.Call(`_arrow_fs___FileSystem__DeleteFiles`, file_system, paths))
-}
-
-fs___FileSystem__Move <- function(file_system, src, dest){
-    invisible(.Call(`_arrow_fs___FileSystem__Move`, file_system, src, dest))
-}
-
-fs___FileSystem__CopyFile <- function(file_system, src, dest){
-    invisible(.Call(`_arrow_fs___FileSystem__CopyFile`, file_system, src, dest))
-}
-
-fs___FileSystem__OpenInputStream <- function(file_system, path){
-    .Call(`_arrow_fs___FileSystem__OpenInputStream`, file_system, path)
-}
-
-fs___FileSystem__OpenInputFile <- function(file_system, path){
-    .Call(`_arrow_fs___FileSystem__OpenInputFile`, file_system, path)
-}
-
-fs___FileSystem__OpenOutputStream <- function(file_system, path){
-    .Call(`_arrow_fs___FileSystem__OpenOutputStream`, file_system, path)
-}
-
-fs___FileSystem__OpenAppendStream <- function(file_system, path){
-    .Call(`_arrow_fs___FileSystem__OpenAppendStream`, file_system, path)
-}
-
-fs___FileSystem__type_name <- function(file_system){
-    .Call(`_arrow_fs___FileSystem__type_name`, file_system)
-}
-
-fs___LocalFileSystem__create <- function(){
-    .Call(`_arrow_fs___LocalFileSystem__create`)
-}
-
-fs___SubTreeFileSystem__create <- function(base_path, base_fs){
-    .Call(`_arrow_fs___SubTreeFileSystem__create`, base_path, base_fs)
-}
-
-fs___SubTreeFileSystem__base_fs <- function(file_system){
-    .Call(`_arrow_fs___SubTreeFileSystem__base_fs`, file_system)
-}
-
-fs___SubTreeFileSystem__base_path <- function(file_system){
-    .Call(`_arrow_fs___SubTreeFileSystem__base_path`, file_system)
-}
-
-fs___FileSystemFromUri <- function(path){
-    .Call(`_arrow_fs___FileSystemFromUri`, path)
-}
-
-fs___CopyFiles <- function(source_fs, source_sel, destination_fs, destination_base_dir, chunk_size, use_threads){
-    invisible(.Call(`_arrow_fs___CopyFiles`, source_fs, source_sel, destination_fs, destination_base_dir, chunk_size, use_threads))
-}
-
-fs___S3FileSystem__create <- function(anonymous, access_key, secret_key, session_token, role_arn, session_name, external_id, load_frequency, region, endpoint_override, scheme, background_writes){
-    .Call(`_arrow_fs___S3FileSystem__create`, anonymous, access_key, secret_key, session_token, role_arn, session_name, external_id, load_frequency, region, endpoint_override, scheme, background_writes)
-}
-
-fs___S3FileSystem__region <- function(fs){
-    .Call(`_arrow_fs___S3FileSystem__region`, fs)
-}
-
-io___Readable__Read <- function(x, nbytes){
-    .Call(`_arrow_io___Readable__Read`, x, nbytes)
-}
-
-io___InputStream__Close <- function(x){
-    invisible(.Call(`_arrow_io___InputStream__Close`, x))
-}
-
-io___OutputStream__Close <- function(x){
-    invisible(.Call(`_arrow_io___OutputStream__Close`, x))
-}
-
-io___RandomAccessFile__GetSize <- function(x){
-    .Call(`_arrow_io___RandomAccessFile__GetSize`, x)
-}
-
-io___RandomAccessFile__supports_zero_copy <- function(x){
-    .Call(`_arrow_io___RandomAccessFile__supports_zero_copy`, x)
-}
-
-io___RandomAccessFile__Seek <- function(x, position){
-    invisible(.Call(`_arrow_io___RandomAccessFile__Seek`, x, position))
-}
-
-io___RandomAccessFile__Tell <- function(x){
-    .Call(`_arrow_io___RandomAccessFile__Tell`, x)
-}
-
-io___RandomAccessFile__Read0 <- function(x){
-    .Call(`_arrow_io___RandomAccessFile__Read0`, x)
-}
-
-io___RandomAccessFile__ReadAt <- function(x, position, nbytes){
-    .Call(`_arrow_io___RandomAccessFile__ReadAt`, x, position, nbytes)
-}
-
-io___MemoryMappedFile__Create <- function(path, size){
-    .Call(`_arrow_io___MemoryMappedFile__Create`, path, size)
-}
-
-io___MemoryMappedFile__Open <- function(path, mode){
-    .Call(`_arrow_io___MemoryMappedFile__Open`, path, mode)
-}
-
-io___MemoryMappedFile__Resize <- function(x, size){
-    invisible(.Call(`_arrow_io___MemoryMappedFile__Resize`, x, size))
-}
-
-io___ReadableFile__Open <- function(path){
-    .Call(`_arrow_io___ReadableFile__Open`, path)
-}
-
-io___BufferReader__initialize <- function(buffer){
-    .Call(`_arrow_io___BufferReader__initialize`, buffer)
-}
-
-io___Writable__write <- function(stream, buf){
-    invisible(.Call(`_arrow_io___Writable__write`, stream, buf))
-}
-
-io___OutputStream__Tell <- function(stream){
-    .Call(`_arrow_io___OutputStream__Tell`, stream)
-}
-
-io___FileOutputStream__Open <- function(path){
-    .Call(`_arrow_io___FileOutputStream__Open`, path)
-}
-
-io___BufferOutputStream__Create <- function(initial_capacity){
-    .Call(`_arrow_io___BufferOutputStream__Create`, initial_capacity)
-}
-
-io___BufferOutputStream__capacity <- function(stream){
-    .Call(`_arrow_io___BufferOutputStream__capacity`, stream)
-}
-
-io___BufferOutputStream__Finish <- function(stream){
-    .Call(`_arrow_io___BufferOutputStream__Finish`, stream)
-}
-
-io___BufferOutputStream__Tell <- function(stream){
-    .Call(`_arrow_io___BufferOutputStream__Tell`, stream)
-}
-
-io___BufferOutputStream__Write <- function(stream, bytes){
-    invisible(.Call(`_arrow_io___BufferOutputStream__Write`, stream, bytes))
-}
-
-json___ReadOptions__initialize <- function(use_threads, block_size){
-    .Call(`_arrow_json___ReadOptions__initialize`, use_threads, block_size)
-}
-
-json___ParseOptions__initialize1 <- function(newlines_in_values){
-    .Call(`_arrow_json___ParseOptions__initialize1`, newlines_in_values)
-}
-
-json___ParseOptions__initialize2 <- function(newlines_in_values, explicit_schema){
-    .Call(`_arrow_json___ParseOptions__initialize2`, newlines_in_values, explicit_schema)
-}
-
-json___TableReader__Make <- function(input, read_options, parse_options){
-    .Call(`_arrow_json___TableReader__Make`, input, read_options, parse_options)
-}
-
-json___TableReader__Read <- function(table_reader){
-    .Call(`_arrow_json___TableReader__Read`, table_reader)
-}
-
-MemoryPool__default <- function(){
-    .Call(`_arrow_MemoryPool__default`)
-}
-
-MemoryPool__bytes_allocated <- function(pool){
-    .Call(`_arrow_MemoryPool__bytes_allocated`, pool)
-}
-
-MemoryPool__max_memory <- function(pool){
-    .Call(`_arrow_MemoryPool__max_memory`, pool)
-}
-
-MemoryPool__backend_name <- function(pool){
-    .Call(`_arrow_MemoryPool__backend_name`, pool)
-}
-
-supported_memory_backends <- function(){
-    .Call(`_arrow_supported_memory_backends`)
-}
-
-ipc___Message__body_length <- function(message){
-    .Call(`_arrow_ipc___Message__body_length`, message)
-}
-
-ipc___Message__metadata <- function(message){
-    .Call(`_arrow_ipc___Message__metadata`, message)
-}
-
-ipc___Message__body <- function(message){
-    .Call(`_arrow_ipc___Message__body`, message)
-}
-
-ipc___Message__Verify <- function(message){
-    .Call(`_arrow_ipc___Message__Verify`, message)
-}
-
-ipc___Message__type <- function(message){
-    .Call(`_arrow_ipc___Message__type`, message)
-}
-
-ipc___Message__Equals <- function(x, y){
-    .Call(`_arrow_ipc___Message__Equals`, x, y)
-}
-
-ipc___ReadRecordBatch__Message__Schema <- function(message, schema){
-    .Call(`_arrow_ipc___ReadRecordBatch__Message__Schema`, message, schema)
-}
-
-ipc___ReadSchema_InputStream <- function(stream){
-    .Call(`_arrow_ipc___ReadSchema_InputStream`, stream)
-}
-
-ipc___ReadSchema_Message <- function(message){
-    .Call(`_arrow_ipc___ReadSchema_Message`, message)
-}
-
-ipc___MessageReader__Open <- function(stream){
-    .Call(`_arrow_ipc___MessageReader__Open`, stream)
-}
-
-ipc___MessageReader__ReadNextMessage <- function(reader){
-    .Call(`_arrow_ipc___MessageReader__ReadNextMessage`, reader)
-}
-
-ipc___ReadMessage <- function(stream){
-    .Call(`_arrow_ipc___ReadMessage`, stream)
-}
-
-parquet___arrow___ArrowReaderProperties__Make <- function(use_threads){
-    .Call(`_arrow_parquet___arrow___ArrowReaderProperties__Make`, use_threads)
-}
-
-parquet___arrow___ArrowReaderProperties__set_use_threads <- function(properties, use_threads){
-    invisible(.Call(`_arrow_parquet___arrow___ArrowReaderProperties__set_use_threads`, properties, use_threads))
-}
-
-parquet___arrow___ArrowReaderProperties__get_use_threads <- function(properties, use_threads){
-    .Call(`_arrow_parquet___arrow___ArrowReaderProperties__get_use_threads`, properties, use_threads)
-}
-
-parquet___arrow___ArrowReaderProperties__get_read_dictionary <- function(properties, column_index){
-    .Call(`_arrow_parquet___arrow___ArrowReaderProperties__get_read_dictionary`, properties, column_index)
-}
-
-parquet___arrow___ArrowReaderProperties__set_read_dictionary <- function(properties, column_index, read_dict){
-    invisible(.Call(`_arrow_parquet___arrow___ArrowReaderProperties__set_read_dictionary`, properties, column_index, read_dict))
-}
-
-parquet___arrow___FileReader__OpenFile <- function(file, props){
-    .Call(`_arrow_parquet___arrow___FileReader__OpenFile`, file, props)
-}
-
-parquet___arrow___FileReader__ReadTable1 <- function(reader){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadTable1`, reader)
-}
-
-parquet___arrow___FileReader__ReadTable2 <- function(reader, column_indices){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadTable2`, reader, column_indices)
-}
-
-parquet___arrow___FileReader__ReadRowGroup1 <- function(reader, i){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadRowGroup1`, reader, i)
-}
-
-parquet___arrow___FileReader__ReadRowGroup2 <- function(reader, i, column_indices){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadRowGroup2`, reader, i, column_indices)
-}
-
-parquet___arrow___FileReader__ReadRowGroups1 <- function(reader, row_groups){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadRowGroups1`, reader, row_groups)
-}
-
-parquet___arrow___FileReader__ReadRowGroups2 <- function(reader, row_groups, column_indices){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadRowGroups2`, reader, row_groups, column_indices)
-}
-
-parquet___arrow___FileReader__num_rows <- function(reader){
-    .Call(`_arrow_parquet___arrow___FileReader__num_rows`, reader)
-}
-
-parquet___arrow___FileReader__num_columns <- function(reader){
-    .Call(`_arrow_parquet___arrow___FileReader__num_columns`, reader)
-}
-
-parquet___arrow___FileReader__num_row_groups <- function(reader){
-    .Call(`_arrow_parquet___arrow___FileReader__num_row_groups`, reader)
-}
-
-parquet___arrow___FileReader__ReadColumn <- function(reader, i){
-    .Call(`_arrow_parquet___arrow___FileReader__ReadColumn`, reader, i)
-}
-
-parquet___ArrowWriterProperties___create <- function(allow_truncated_timestamps, use_deprecated_int96_timestamps, timestamp_unit){
-    .Call(`_arrow_parquet___ArrowWriterProperties___create`, allow_truncated_timestamps, use_deprecated_int96_timestamps, timestamp_unit)
-}
-
-parquet___WriterProperties___Builder__create <- function(){
-    .Call(`_arrow_parquet___WriterProperties___Builder__create`)
-}
-
-parquet___WriterProperties___Builder__version <- function(builder, version){
-    invisible(.Call(`_arrow_parquet___WriterProperties___Builder__version`, builder, version))
-}
-
-parquet___ArrowWriterProperties___Builder__set_compressions <- function(builder, paths, types){
-    invisible(.Call(`_arrow_parquet___ArrowWriterProperties___Builder__set_compressions`, builder, paths, types))
-}
-
-parquet___ArrowWriterProperties___Builder__set_compression_levels <- function(builder, paths, levels){
-    invisible(.Call(`_arrow_parquet___ArrowWriterProperties___Builder__set_compression_levels`, builder, paths, levels))
-}
-
-parquet___ArrowWriterProperties___Builder__set_use_dictionary <- function(builder, paths, use_dictionary){
-    invisible(.Call(`_arrow_parquet___ArrowWriterProperties___Builder__set_use_dictionary`, builder, paths, use_dictionary))
-}
-
-parquet___ArrowWriterProperties___Builder__set_write_statistics <- function(builder, paths, write_statistics){
-    invisible(.Call(`_arrow_parquet___ArrowWriterProperties___Builder__set_write_statistics`, builder, paths, write_statistics))
-}
-
-parquet___ArrowWriterProperties___Builder__data_page_size <- function(builder, data_page_size){
-    invisible(.Call(`_arrow_parquet___ArrowWriterProperties___Builder__data_page_size`, builder, data_page_size))
-}
-
-parquet___WriterProperties___Builder__build <- function(builder){
-    .Call(`_arrow_parquet___WriterProperties___Builder__build`, builder)
-}
-
-parquet___arrow___ParquetFileWriter__Open <- function(schema, sink, properties, arrow_properties){
-    .Call(`_arrow_parquet___arrow___ParquetFileWriter__Open`, schema, sink, properties, arrow_properties)
-}
-
-parquet___arrow___FileWriter__WriteTable <- function(writer, table, chunk_size){
-    invisible(.Call(`_arrow_parquet___arrow___FileWriter__WriteTable`, writer, table, chunk_size))
-}
-
-parquet___arrow___FileWriter__Close <- function(writer){
-    invisible(.Call(`_arrow_parquet___arrow___FileWriter__Close`, writer))
-}
-
-parquet___arrow___WriteTable <- function(table, sink, properties, arrow_properties){
-    invisible(.Call(`_arrow_parquet___arrow___WriteTable`, table, sink, properties, arrow_properties))
-}
-
-parquet___arrow___FileReader__GetSchema <- function(reader){
-    .Call(`_arrow_parquet___arrow___FileReader__GetSchema`, reader)
-}
-
-ImportArray <- function(array, schema){
-    .Call(`_arrow_ImportArray`, array, schema)
-}
-
-ImportRecordBatch <- function(array, schema){
-    .Call(`_arrow_ImportRecordBatch`, array, schema)
-}
-
-ImportSchema <- function(schema){
-    .Call(`_arrow_ImportSchema`, schema)
-}
-
-allocate_arrow_schema <- function(){
-    .Call(`_arrow_allocate_arrow_schema`)
-}
-
-delete_arrow_schema <- function(ptr){
-    invisible(.Call(`_arrow_delete_arrow_schema`, ptr))
-}
-
-allocate_arrow_array <- function(){
-    .Call(`_arrow_allocate_arrow_array`)
-}
-
-delete_arrow_array <- function(ptr){
-    invisible(.Call(`_arrow_delete_arrow_array`, ptr))
-}
-
-ExportType <- function(type, ptr){
-    invisible(.Call(`_arrow_ExportType`, type, ptr))
-}
-
-ExportSchema <- function(schema, ptr){
-    invisible(.Call(`_arrow_ExportSchema`, schema, ptr))
-}
-
-ExportArray <- function(array, array_ptr, schema_ptr){
-    invisible(.Call(`_arrow_ExportArray`, array, array_ptr, schema_ptr))
-}
-
-ExportRecordBatch <- function(batch, array_ptr, schema_ptr){
-    invisible(.Call(`_arrow_ExportRecordBatch`, batch, array_ptr, schema_ptr))
-}
-
-vec_to_arrow <- function(x, s_type){
-    .Call(`_arrow_vec_to_arrow`, x, s_type)
-}
-
-DictionaryArray__FromArrays <- function(type, indices, dict){
-    .Call(`_arrow_DictionaryArray__FromArrays`, type, indices, dict)
-}
-
-RecordBatch__num_columns <- function(x){
-    .Call(`_arrow_RecordBatch__num_columns`, x)
-}
-
-RecordBatch__num_rows <- function(x){
-    .Call(`_arrow_RecordBatch__num_rows`, x)
-}
-
-RecordBatch__schema <- function(x){
-    .Call(`_arrow_RecordBatch__schema`, x)
-}
-
-RecordBatch__RenameColumns <- function(batch, names){
-    .Call(`_arrow_RecordBatch__RenameColumns`, batch, names)
-}
-
-RecordBatch__ReplaceSchemaMetadata <- function(x, metadata){
-    .Call(`_arrow_RecordBatch__ReplaceSchemaMetadata`, x, metadata)
-}
-
-RecordBatch__columns <- function(batch){
-    .Call(`_arrow_RecordBatch__columns`, batch)
-}
-
-RecordBatch__column <- function(batch, i){
-    .Call(`_arrow_RecordBatch__column`, batch, i)
-}
-
-RecordBatch__GetColumnByName <- function(batch, name){
-    .Call(`_arrow_RecordBatch__GetColumnByName`, batch, name)
-}
-
-RecordBatch__SelectColumns <- function(batch, indices){
-    .Call(`_arrow_RecordBatch__SelectColumns`, batch, indices)
-}
-
-RecordBatch__Equals <- function(self, other, check_metadata){
-    .Call(`_arrow_RecordBatch__Equals`, self, other, check_metadata)
-}
-
-RecordBatch__AddColumn <- function(batch, i, field, column){
-    .Call(`_arrow_RecordBatch__AddColumn`, batch, i, field, column)
-}
-
-RecordBatch__SetColumn <- function(batch, i, field, column){
-    .Call(`_arrow_RecordBatch__SetColumn`, batch, i, field, column)
-}
-
-RecordBatch__RemoveColumn <- function(batch, i){
-    .Call(`_arrow_RecordBatch__RemoveColumn`, batch, i)
-}
-
-RecordBatch__column_name <- function(batch, i){
-    .Call(`_arrow_RecordBatch__column_name`, batch, i)
-}
-
-RecordBatch__names <- function(batch){
-    .Call(`_arrow_RecordBatch__names`, batch)
-}
-
-RecordBatch__Slice1 <- function(self, offset){
-    .Call(`_arrow_RecordBatch__Slice1`, self, offset)
-}
-
-RecordBatch__Slice2 <- function(self, offset, length){
-    .Call(`_arrow_RecordBatch__Slice2`, self, offset, length)
-}
-
-ipc___SerializeRecordBatch__Raw <- function(batch){
-    .Call(`_arrow_ipc___SerializeRecordBatch__Raw`, batch)
-}
-
-ipc___ReadRecordBatch__InputStream__Schema <- function(stream, schema){
-    .Call(`_arrow_ipc___ReadRecordBatch__InputStream__Schema`, stream, schema)
-}
-
-RecordBatch__from_arrays <- function(schema_sxp, lst){
-    .Call(`_arrow_RecordBatch__from_arrays`, schema_sxp, lst)
-}
-
-RecordBatchReader__schema <- function(reader){
-    .Call(`_arrow_RecordBatchReader__schema`, reader)
-}
-
-RecordBatchReader__ReadNext <- function(reader){
-    .Call(`_arrow_RecordBatchReader__ReadNext`, reader)
-}
-
-ipc___RecordBatchStreamReader__Open <- function(stream){
-    .Call(`_arrow_ipc___RecordBatchStreamReader__Open`, stream)
-}
-
-ipc___RecordBatchStreamReader__batches <- function(reader){
-    .Call(`_arrow_ipc___RecordBatchStreamReader__batches`, reader)
-}
-
-ipc___RecordBatchFileReader__schema <- function(reader){
-    .Call(`_arrow_ipc___RecordBatchFileReader__schema`, reader)
-}
-
-ipc___RecordBatchFileReader__num_record_batches <- function(reader){
-    .Call(`_arrow_ipc___RecordBatchFileReader__num_record_batches`, reader)
-}
-
-ipc___RecordBatchFileReader__ReadRecordBatch <- function(reader, i){
-    .Call(`_arrow_ipc___RecordBatchFileReader__ReadRecordBatch`, reader, i)
-}
-
-ipc___RecordBatchFileReader__Open <- function(file){
-    .Call(`_arrow_ipc___RecordBatchFileReader__Open`, file)
-}
-
-Table__from_RecordBatchReader <- function(reader){
-    .Call(`_arrow_Table__from_RecordBatchReader`, reader)
-}
-
-Table__from_RecordBatchFileReader <- function(reader){
-    .Call(`_arrow_Table__from_RecordBatchFileReader`, reader)
-}
-
-ipc___RecordBatchFileReader__batches <- function(reader){
-    .Call(`_arrow_ipc___RecordBatchFileReader__batches`, reader)
-}
-
-ipc___RecordBatchWriter__WriteRecordBatch <- function(batch_writer, batch){
-    invisible(.Call(`_arrow_ipc___RecordBatchWriter__WriteRecordBatch`, batch_writer, batch))
-}
-
-ipc___RecordBatchWriter__WriteTable <- function(batch_writer, table){
-    invisible(.Call(`_arrow_ipc___RecordBatchWriter__WriteTable`, batch_writer, table))
-}
-
-ipc___RecordBatchWriter__Close <- function(batch_writer){
-    invisible(.Call(`_arrow_ipc___RecordBatchWriter__Close`, batch_writer))
-}
-
-ipc___RecordBatchFileWriter__Open <- function(stream, schema, use_legacy_format, metadata_version){
-    .Call(`_arrow_ipc___RecordBatchFileWriter__Open`, stream, schema, use_legacy_format, metadata_version)
-}
-
-ipc___RecordBatchStreamWriter__Open <- function(stream, schema, use_legacy_format, metadata_version){
-    .Call(`_arrow_ipc___RecordBatchStreamWriter__Open`, stream, schema, use_legacy_format, metadata_version)
-}
-
-runtime_info <- function(){
-    .Call(`_arrow_runtime_info`)
-}
-
-Array__GetScalar <- function(x, i){
-    .Call(`_arrow_Array__GetScalar`, x, i)
-}
-
-Scalar__ToString <- function(s){
-    .Call(`_arrow_Scalar__ToString`, s)
-}
-
-StructScalar__field <- function(s, i){
-    .Call(`_arrow_StructScalar__field`, s, i)
-}
-
-StructScalar__GetFieldByName <- function(s, name){
-    .Call(`_arrow_StructScalar__GetFieldByName`, s, name)
-}
-
-Scalar__as_vector <- function(scalar){
-    .Call(`_arrow_Scalar__as_vector`, scalar)
-}
-
-MakeArrayFromScalar <- function(scalar){
-    .Call(`_arrow_MakeArrayFromScalar`, scalar)
-}
-
-Scalar__is_valid <- function(s){
-    .Call(`_arrow_Scalar__is_valid`, s)
-}
-
-Scalar__type <- function(s){
-    .Call(`_arrow_Scalar__type`, s)
-}
-
-Scalar__Equals <- function(lhs, rhs){
-    .Call(`_arrow_Scalar__Equals`, lhs, rhs)
-}
-
-Scalar__ApproxEquals <- function(lhs, rhs){
-    .Call(`_arrow_Scalar__ApproxEquals`, lhs, rhs)
-}
-
-schema_ <- function(fields){
-    .Call(`_arrow_schema_`, fields)
-}
-
-Schema__ToString <- function(s){
-    .Call(`_arrow_Schema__ToString`, s)
-}
-
-Schema__num_fields <- function(s){
-    .Call(`_arrow_Schema__num_fields`, s)
-}
-
-Schema__field <- function(s, i){
-    .Call(`_arrow_Schema__field`, s, i)
-}
-
-Schema__AddField <- function(s, i, field){
-    .Call(`_arrow_Schema__AddField`, s, i, field)
-}
-
-Schema__SetField <- function(s, i, field){
-    .Call(`_arrow_Schema__SetField`, s, i, field)
-}
-
-Schema__RemoveField <- function(s, i){
-    .Call(`_arrow_Schema__RemoveField`, s, i)
-}
-
-Schema__GetFieldByName <- function(s, x){
-    .Call(`_arrow_Schema__GetFieldByName`, s, x)
-}
-
-Schema__fields <- function(schema){
-    .Call(`_arrow_Schema__fields`, schema)
-}
-
-Schema__field_names <- function(schema){
-    .Call(`_arrow_Schema__field_names`, schema)
-}
-
-Schema__HasMetadata <- function(schema){
-    .Call(`_arrow_Schema__HasMetadata`, schema)
-}
-
-Schema__metadata <- function(schema){
-    .Call(`_arrow_Schema__metadata`, schema)
-}
-
-Schema__WithMetadata <- function(schema, metadata){
-    .Call(`_arrow_Schema__WithMetadata`, schema, metadata)
-}
-
-Schema__serialize <- function(schema){
-    .Call(`_arrow_Schema__serialize`, schema)
-}
-
-Schema__Equals <- function(schema, other, check_metadata){
-    .Call(`_arrow_Schema__Equals`, schema, other, check_metadata)
-}
-
-arrow__UnifySchemas <- function(schemas){
-    .Call(`_arrow_arrow__UnifySchemas`, schemas)
-}
-
-Table__num_columns <- function(x){
-    .Call(`_arrow_Table__num_columns`, x)
-}
-
-Table__num_rows <- function(x){
-    .Call(`_arrow_Table__num_rows`, x)
-}
-
-Table__schema <- function(x){
-    .Call(`_arrow_Table__schema`, x)
-}
-
-Table__ReplaceSchemaMetadata <- function(x, metadata){
-    .Call(`_arrow_Table__ReplaceSchemaMetadata`, x, metadata)
-}
-
-Table__column <- function(table, i){
-    .Call(`_arrow_Table__column`, table, i)
-}
-
-Table__field <- function(table, i){
-    .Call(`_arrow_Table__field`, table, i)
-}
-
-Table__columns <- function(table){
-    .Call(`_arrow_Table__columns`, table)
-}
-
-Table__ColumnNames <- function(table){
-    .Call(`_arrow_Table__ColumnNames`, table)
-}
-
-Table__RenameColumns <- function(table, names){
-    .Call(`_arrow_Table__RenameColumns`, table, names)
-}
-
-Table__Slice1 <- function(table, offset){
-    .Call(`_arrow_Table__Slice1`, table, offset)
-}
-
-Table__Slice2 <- function(table, offset, length){
-    .Call(`_arrow_Table__Slice2`, table, offset, length)
-}
-
-Table__Equals <- function(lhs, rhs, check_metadata){
-    .Call(`_arrow_Table__Equals`, lhs, rhs, check_metadata)
-}
-
-Table__Validate <- function(table){
-    .Call(`_arrow_Table__Validate`, table)
-}
-
-Table__ValidateFull <- function(table){
-    .Call(`_arrow_Table__ValidateFull`, table)
-}
-
-Table__GetColumnByName <- function(table, name){
-    .Call(`_arrow_Table__GetColumnByName`, table, name)
-}
-
-Table__RemoveColumn <- function(table, i){
-    .Call(`_arrow_Table__RemoveColumn`, table, i)
-}
-
-Table__AddColumn <- function(table, i, field, column){
-    .Call(`_arrow_Table__AddColumn`, table, i, field, column)
-}
-
-Table__SetColumn <- function(table, i, field, column){
-    .Call(`_arrow_Table__SetColumn`, table, i, field, column)
-}
-
-Table__SelectColumns <- function(table, indices){
-    .Call(`_arrow_Table__SelectColumns`, table, indices)
-}
-
-all_record_batches <- function(lst){
-    .Call(`_arrow_all_record_batches`, lst)
-}
-
-Table__from_record_batches <- function(batches, schema_sxp){
-    .Call(`_arrow_Table__from_record_batches`, batches, schema_sxp)
-}
-
-Table__from_dots <- function(lst, schema_sxp){
-    .Call(`_arrow_Table__from_dots`, lst, schema_sxp)
-}
-
-GetCpuThreadPoolCapacity <- function(){
-    .Call(`_arrow_GetCpuThreadPoolCapacity`)
-}
-
-SetCpuThreadPoolCapacity <- function(threads){
-    invisible(.Call(`_arrow_SetCpuThreadPoolCapacity`, threads))
-}
-
-Array__infer_type <- function(x){
-    .Call(`_arrow_Array__infer_type`, x)
-}
-
-
-
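
Every wrapper in the generated file above follows one mechanical pattern: a thin R function forwards its arguments to a registered C++ routine via `.Call()`, and calls whose C++ side returns nothing useful are wrapped in `invisible()`. A minimal sketch of the pattern, using a hypothetical routine name:

    # `_arrow_Example__Method` is a hypothetical registered C++ routine.
    Example__Method <- function(x) {
      .Call(`_arrow_Example__Method`, x)  # value-returning: result passed through
    }
    Example__SetOption <- function(x, value) {
      invisible(.Call(`_arrow_Example__SetOption`, x, value))  # side effect only
    }
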
diff --git a/r/R/buffer.R b/r/R/buffer.R
deleted file mode 100644
index db61ed3..0000000
--- a/r/R/buffer.R
+++ /dev/null
@@ -1,72 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @title Buffer class
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @description A Buffer is an object containing a pointer to a piece of
-#' contiguous memory with a particular size.
-#' @section Factory:
-#' `buffer()` lets you create an `arrow::Buffer` from an R object
-#' @section Methods:
-#'
-#' - `$is_mutable()` :
-#' - `$ZeroPadding()` :
-#' - `$size()` :
-#' - `$capacity()`:
-#'
-#' @rdname buffer
-#' @name buffer
-#' @export
-#' @include arrow-package.R
-#' @include enums.R
-Buffer <- R6Class("Buffer", inherit = ArrowObject,
-  public = list(
-    ZeroPadding = function() Buffer__ZeroPadding(self),
-    data = function() Buffer__data(self),
-    Equals = function(other, ...) {
-      inherits(other, "Buffer") && Buffer__Equals(self, other)
-    }
-  ),
-
-  active = list(
-    is_mutable = function() Buffer__is_mutable(self),
-    size = function() Buffer__size(self),
-    capacity = function() Buffer__capacity(self)
-  )
-)
-
-Buffer$create <- function(x) {
-  if (inherits(x, "Buffer")) {
-    x
-  } else if (inherits(x, c("raw", "numeric", "integer", "complex"))) {
-    r___RBuffer__initialize(x)
-  } else if (inherits(x, "BufferOutputStream")) {
-    x$finish()
-  } else {
-    stop("Cannot convert object of class ", class(x), " to arrow::Buffer")
-  }
-}
-
-#' @param x R object. Only raw, numeric, integer, and complex vectors are currently supported
-#' @return an instance of `Buffer` that borrows memory from `x`
-#' @export
-buffer <- Buffer$create
-
-#' @export
-as.raw.Buffer <- function(x) x$data()
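
A short usage sketch for the `Buffer` class removed above, assuming the arrow R package as it existed before this commit:

    b <- buffer(as.raw(c(1, 2, 3)))  # borrows memory from the raw vector
    b$size                           # 3, via the active binding defined above
    identical(as.raw(b), as.raw(c(1, 2, 3)))  # TRUE; round-trips through $data()
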
diff --git a/r/R/chunked-array.R b/r/R/chunked-array.R
deleted file mode 100644
index a7f9c8f..0000000
--- a/r/R/chunked-array.R
+++ /dev/null
@@ -1,132 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-datum.R
-
-#' @title ChunkedArray class
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @description A `ChunkedArray` is a data structure managing a list of
-#' primitive Arrow [Arrays][Array] logically as one large array. Chunked arrays
-#' may be grouped together in a [Table].
-#' @section Factory:
-#' The `ChunkedArray$create()` factory method instantiates the object from
-#' various Arrays or R vectors. `chunked_array()` is an alias for it.
-#'
-#' @section Methods:
-#'
-#' - `$length()`: Number of elements this array contains
-#' - `$chunk(i)`: Extract an `Array` chunk by integer position
-#' - `$as_vector()`: convert to an R vector
-#' - `$Slice(offset, length = NULL)`: Construct a zero-copy slice of the array
-#'    with the indicated offset and length. If length is `NULL`, the slice goes
-#'    until the end of the array.
-#' - `$Take(i)`: return a `ChunkedArray` with values at positions given by
-#'    integers `i`. If `i` is an Arrow `Array` or `ChunkedArray`, it will be
-#'    coerced to an R vector before taking.
-#' - `$Filter(i, keep_na = TRUE)`: return a `ChunkedArray` with values at positions where
-#'    logical vector or Arrow boolean-type `(Chunked)Array` `i` is `TRUE`.
-#' - `$SortIndices(descending = FALSE)`: return an `Array` of integer positions that can be
-#'    used to rearrange the `ChunkedArray` in ascending or descending order
-#' - `$cast(target_type, safe = TRUE, options = cast_options(safe))`: Alter the
-#'    data in the array to change its type.
-#' - `$null_count()`: The number of null entries in the array
-#' - `$chunks()`: return a list of `Array`s
-#' - `$num_chunks()`: integer number of chunks in the `ChunkedArray`
-#' - `$type()`: logical type of data
-#' - `$View(type)`: Construct a zero-copy view of this `ChunkedArray` with the
-#'    given type.
-#' - `$Validate()`: Perform any validation checks to determine obvious inconsistencies
-#'    within the array's internal data. This can be an expensive check, potentially `O(length)`
-#'
-#' @rdname ChunkedArray
-#' @name ChunkedArray
-#' @seealso [Array]
-#' @export
-ChunkedArray <- R6Class("ChunkedArray", inherit = ArrowDatum,
-  public = list(
-    length = function() ChunkedArray__length(self),
-    chunk = function(i) Array$create(ChunkedArray__chunk(self, i)),
-    as_vector = function() ChunkedArray__as_vector(self),
-    Slice = function(offset, length = NULL){
-      if (is.null(length)) {
-        ChunkedArray__Slice1(self, offset)
-      } else {
-        ChunkedArray__Slice2(self, offset, length)
-      }
-    },
-    Take = function(i) {
-      if (is.numeric(i)) {
-        i <- as.integer(i)
-      }
-      if (is.integer(i)) {
-        i <- Array$create(i)
-      }
-      call_function("take", self, i)
-    },
-    Filter = function(i, keep_na = TRUE) {
-      if (is.logical(i)) {
-        i <- Array$create(i)
-      }
-      call_function("filter", self, i, options = list(keep_na = keep_na))
-    },
-    SortIndices = function(descending = FALSE) {
-      assert_that(is.logical(descending))
-      assert_that(length(descending) == 1L)
-      assert_that(!is.na(descending))
-      # TODO: after ARROW-12042 is closed, review whether this and the
-      # Array$SortIndices definition can be consolidated
-      call_function(
-        "sort_indices",
-        self,
-        options = list(names = "", orders = as.integer(descending))
-      )
-    },
-    View = function(type) {
-      ChunkedArray__View(self, as_type(type))
-    },
-    Validate = function() {
-      ChunkedArray__Validate(self)
-    },
-    ToString = function() {
-      ChunkedArray__ToString(self)
-    },
-    Equals = function(other, ...) {
-      inherits(other, "ChunkedArray") && ChunkedArray__Equals(self, other)
-    }
-  ),
-  active = list(
-    null_count = function() ChunkedArray__null_count(self),
-    num_chunks = function() ChunkedArray__num_chunks(self),
-    chunks = function() map(ChunkedArray__chunks(self), Array$create),
-    type = function() ChunkedArray__type(self)
-  )
-)
-
-ChunkedArray$create <- function(..., type = NULL) {
-  if (!is.null(type)) {
-    type <- as_type(type)
-  }
-  ChunkedArray__from_list(list2(...), type)
-}
-
-#' @param \dots Vectors to coerce
-#' @param type currently ignored
-#' @rdname ChunkedArray
-#' @export
-chunked_array <- ChunkedArray$create
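
A usage sketch for the class above (pre-removal arrow package assumed): two R vectors become two chunks behind one logical array.

    ca <- chunked_array(1:3, 4:6)
    ca$num_chunks            # 2
    ca$length()              # 6
    ca$Slice(2)$as_vector()  # 3 4 5 6; the offset is 0-based and the slice is zero-copy
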
diff --git a/r/R/compression.R b/r/R/compression.R
deleted file mode 100644
index ebd4c54..0000000
--- a/r/R/compression.R
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include enums.R
-#' @include arrow-package.R
-#' @include io.R
-
-#' @title Compression Codec class
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @description Codecs allow you to create [compressed input and output
-#' streams][compression].
-#' @section Factory:
-#' The `Codec$create()` factory method takes the following arguments:
-#' * `type`: string name of the compression method. Possible values are
-#'    "uncompressed", "snappy", "gzip", "brotli", "zstd", "lz4", "lzo", or
-#'    "bz2". `type` may be upper- or lower-cased. Not all methods may be
-#'    available; support depends on build-time flags for the C++ library.
-#'    See [codec_is_available()]. Most builds support at least "snappy" and
-#'    "gzip". All support "uncompressed".
-#' * `compression_level`: compression level, the default value (`NA`) uses the
-#'    default compression level for the selected compression `type`.
-#' @rdname Codec
-#' @name Codec
-#' @export
-Codec <- R6Class("Codec", inherit = ArrowObject,
-  active = list(
-    name = function() util___Codec__name(self),
-    level = function() abort("Codec$level() not yet implemented")
-  )
-)
-Codec$create <- function(type = "gzip", compression_level = NA) {
-  if (is.string(type)) {
-    type <- util___Codec__Create(
-      compression_from_name(type), compression_level
-    )
-  }
-  assert_is(type, "Codec")
-  type
-}
-
-#' Check whether a compression codec is available
-#'
-#' Support for compression libraries depends on the build-time settings of
-#' the Arrow C++ library. This function lets you know which are available for
-#' use.
-#' @param type A string, one of "uncompressed", "snappy", "gzip", "brotli",
-#' "zstd", "lz4", "lzo", or "bz2", case insensitive.
-#' @return Logical: is `type` available?
-#' @export
-codec_is_available <- function(type) {
-  util___Codec__IsAvailable(compression_from_name(type))
-}
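
For example (results depend on the build-time flags described above):

    codec_is_available("gzip")    # TRUE on most builds
    codec_is_available("SNAPPY")  # matching is case-insensitive
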
-
-compression_from_name <- function(name) {
-  map_int(name, ~CompressionType[[match.arg(toupper(.x), names(CompressionType))]])
-}
-
-#' @title Compressed stream classes
-#' @rdname compression
-#' @name compression
-#' @aliases CompressedInputStream CompressedOutputStream
-#' @docType class
-#' @usage NULL
-#' @format NULL
-#' @description `CompressedInputStream` and `CompressedOutputStream`
-#' allow you to apply a compression [Codec] to an
-#' input or output stream.
-#'
-#' @section Factory:
-#'
-#' The `CompressedInputStream$create()` and `CompressedOutputStream$create()`
-#' factory methods instantiate the object and take the following arguments:
-#'
-#' - `stream` An [InputStream] or [OutputStream], respectively
-#' - `codec` A `Codec`, either a [Codec][Codec] instance or a string
-#' - `compression_level` compression level for when the `codec` argument is given as a string
-#'
-#' @section Methods:
-#'
-#' Methods are inherited from [InputStream] and [OutputStream], respectively
-#' @export
-#' @include arrow-package.R
-CompressedOutputStream <- R6Class("CompressedOutputStream", inherit = OutputStream)
-CompressedOutputStream$create <- function(stream, codec = "gzip", compression_level = NA){
-  codec <- Codec$create(codec, compression_level = compression_level)
-  if (is.string(stream)) {
-    stream <- FileOutputStream$create(stream)
-  }
-  assert_is(stream, "OutputStream")
-  io___CompressedOutputStream__Make(codec, stream)
-}
-
-#' @rdname compression
-#' @usage NULL
-#' @format NULL
-#' @export
-CompressedInputStream <- R6Class("CompressedInputStream", inherit = InputStream)
-CompressedInputStream$create <- function(stream, codec = "gzip", compression_level = NA){
-  codec <- Codec$create(codec, compression_level = compression_level)
-  if (is.string(stream)) {
-    stream <- ReadableFile$create(stream)
-  }
-  assert_is(stream, "InputStream")
-  io___CompressedInputStream__Make(codec, stream)
-}
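
A sketch tying these stream classes to the CSV reader defined later in this diff; the gzip-compressed file name is illustrative:

    stream <- CompressedInputStream$create("data.csv.gz", codec = "gzip")
    df <- read_csv_arrow(stream)  # read_csv_arrow accepts any InputStream
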
diff --git a/r/R/compute.R b/r/R/compute.R
deleted file mode 100644
index 1b79d76..0000000
--- a/r/R/compute.R
+++ /dev/null
@@ -1,257 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Call an Arrow compute function
-#'
-#' This function provides a lower-level API for calling Arrow functions by their
-#' string function name. You won't use it directly for most applications.
-#' Many Arrow compute functions are mapped to R methods,
-#' and in a `dplyr` evaluation context, [all Arrow functions][list_compute_functions()]
-#' are callable with an `arrow_` prefix.
-#' @param function_name string Arrow compute function name
-#' @param ... Function arguments, which may include `Array`, `ChunkedArray`, `Scalar`,
-#' `RecordBatch`, or `Table`.
-#' @param args list arguments as an alternative to specifying in `...`
-#' @param options named list of C++ function options.
-#' @return An `Array`, `ChunkedArray`, `Scalar`, `RecordBatch`, or `Table`, whatever the compute function results in.
-#' @seealso [Arrow C++ documentation](https://arrow.apache.org/docs/cpp/compute.html) for the functions and their respective options.
-#' @examples
-#' \donttest{
-#' a <- Array$create(c(1L, 2L, 3L, NA, 5L))
-#' s <- Scalar$create(4L)
-#' call_function("fill_null", a, s)
-#'
-#' a <- Array$create(rnorm(10000))
-#' call_function("quantile", a, options = list(q = seq(0, 1, 0.25)))
-#' }
-#' @export
-#' @include array.R
-#' @include chunked-array.R
-#' @include scalar.R
-call_function <- function(function_name, ..., args = list(...), options = empty_named_list()) {
-  assert_that(is.string(function_name))
-  assert_that(is.list(options), !is.null(names(options)))
-
-  datum_classes <- c("Array", "ChunkedArray", "RecordBatch", "Table", "Scalar")
-  valid_args <- map_lgl(args, ~inherits(., datum_classes))
-  if (!all(valid_args)) {
-    # Lame, just pick one to report
-    first_bad <- min(which(!valid_args))
-    stop("Argument ", first_bad, " is of class ", head(class(args[[first_bad]]), 1), " but it must be one of ", oxford_paste(datum_classes, "or"), call. = FALSE)
-  }
-
-  compute__CallFunction(function_name, args, options)
-}
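
Beyond the `@examples` above, `options` selects non-default kernel behavior; for instance, the `min_max` kernel returns a struct scalar whose fields can be extracted by name (the same pattern `scalar_aggregate()` below relies on):

    mm <- call_function("min_max", Array$create(c(2L, 7L, 1L)), options = list(na.rm = TRUE))
    mm$GetFieldByName("min")  # Scalar 1
    mm$GetFieldByName("max")  # Scalar 7
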
-
-#' List available Arrow C++ compute functions
-#'
-#' This function lists the names of all available Arrow C++ library compute functions.
-#' These can be called by passing to [call_function()], or they can be
-#' called by name with an `arrow_` prefix inside a `dplyr` verb.
-#'
-#' The resulting list describes the capabilities of your `arrow` build.
-#' Some functions, such as string and regular expression functions,
-#' require optional build-time C++ dependencies. If your `arrow` package
-#' was not compiled with those features enabled, those functions will
-#' not appear in this list.
-#'
-#' Some functions take options that need to be passed when calling them
-#' (in a list called `options`). These options require custom handling
-#' in C++; many functions already have that handling set up but not all do.
-#' If you encounter one that needs special handling for options, please
-#' report an issue.
-#'
-#' Note that this list does *not* enumerate all of the R bindings for these functions.
-#' The package includes Arrow methods for many base R functions that can
-#' be called directly on Arrow objects, as well as some tidyverse-flavored versions
-#' available inside `dplyr` verbs.
-#'
-#' @param pattern Optional regular expression to filter the function list
-#' @param ... Additional parameters passed to `grep()`
-#' @return A character vector of available Arrow C++ function names
-#' @export
-list_compute_functions <- function(pattern = NULL, ...) {
-  funcs <- compute__GetFunctionNames()
-  if (!is.null(pattern)) {
-    funcs <- grep(pattern, funcs, value = TRUE, ...)
-  }
-  funcs
-}
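
For instance, to see which of the aggregation kernels used below are present in a given build:

    list_compute_functions("^min")  # e.g. "min_max", depending on the build
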
-
-#' @export
-sum.ArrowDatum <- function(..., na.rm = FALSE) scalar_aggregate("sum", ..., na.rm = na.rm)
-
-#' @export
-mean.ArrowDatum <- function(..., na.rm = FALSE) scalar_aggregate("mean", ..., na.rm = na.rm)
-
-#' @export
-min.ArrowDatum <- function(..., na.rm = FALSE) {
-  scalar_aggregate("min_max", ..., na.rm = na.rm)$GetFieldByName("min")
-}
-
-#' @export
-max.ArrowDatum <- function(..., na.rm = FALSE) {
-  scalar_aggregate("min_max", ..., na.rm = na.rm)$GetFieldByName("max")
-}
-
-scalar_aggregate <- function(FUN, ..., na.rm = FALSE) {
-  a <- collect_arrays_from_dots(list(...))
-  if (!na.rm && a$null_count > 0 && (FUN %in% c("mean", "sum"))) {
-    # Arrow sum/mean function always drops NAs so handle that here
-    # https://issues.apache.org/jira/browse/ARROW-9054
-    return(Scalar$create(NA_real_))
-  }
-
-  call_function(FUN, a, options = list(na.rm = na.rm))
-}
-
-collect_arrays_from_dots <- function(dots) {
-  # Given a list that may contain both Arrays and ChunkedArrays,
-  # return a single ChunkedArray containing all of those chunks
-  # (may return a regular Array if there is only one element in dots)
-  assert_that(all(map_lgl(dots, is.Array)))
-  if (length(dots) == 1) {
-    return(dots[[1]])
-  }
-
-  arrays <- unlist(lapply(dots, function(x) {
-    if (inherits(x, "ChunkedArray")) {
-      x$chunks
-    } else {
-      x
-    }
-  }))
-  ChunkedArray$create(!!!arrays)
-}
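
The NA handling above is observable directly (sketch, pre-removal arrow assumed):

    a <- Array$create(c(1L, NA, 3L))
    sum(a)                # Scalar NA, per the ARROW-9054 workaround above
    sum(a, na.rm = TRUE)  # Scalar 4
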
-
-#' @export
-quantile.ArrowDatum <- function(x,
-                                probs = seq(0, 1, 0.25),
-                                na.rm = FALSE,
-                                type = 7,
-                                interpolation = c("linear", "lower", "higher", "nearest", "midpoint"),
-                                ...) {
-  if (inherits(x, "Scalar")) x <- Array$create(x)
-  assert_is(probs, c("numeric", "integer"))
-  assert_that(length(probs) > 0)
-  assert_that(all(probs >= 0 & probs <= 1))
-  if (!na.rm && x$null_count > 0) {
-    stop("Missing values not allowed if 'na.rm' is FALSE", call. = FALSE)
-  }
-  if (type != 7) {
-    stop(
-      "Argument `type` not supported in Arrow. To control the quantile ",
-      "interpolation algorithm, set argument `interpolation` to one of: ",
-      "\"linear\" (the default), \"lower\", \"higher\", \"nearest\", or ",
-      "\"midpoint\".",
-      call. = FALSE
-    )
-  }
-  interpolation <- QuantileInterpolation[[toupper(match.arg(interpolation))]]
-  out <- call_function("quantile", x, options = list(q = probs, interpolation = interpolation))
-  if (length(out) == 0) {
-    # When there are no non-missing values in the data, the Arrow quantile
-    # function returns an empty Array, but for consistency with the R quantile
-    # function, we want an Array of NA_real_ with the same length as probs
-    out <- Array$create(rep(NA_real_, length(probs)))
-  }
-  out
-}
-
-#' @export
-median.ArrowDatum <- function(x, na.rm = FALSE, ...) {
-  if (!na.rm && x$null_count > 0) {
-    Scalar$create(NA_real_)
-  } else {
-    Scalar$create(quantile(x, probs = 0.5, na.rm = TRUE, ...))
-  }
-}
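
A sketch of the two methods above; interpolation follows the type-7 default enforced in `quantile.ArrowDatum()`:

    x <- Array$create(c(1, 2, 3, 4))
    quantile(x, probs = c(0.25, 0.75))  # Array [1.75, 3.25]
    median(Array$create(c(1, NA)))      # Scalar NA; pass na.rm = TRUE to get 1
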
-
-#' @export
-unique.ArrowDatum <- function(x, incomparables = FALSE, ...) {
-  call_function("unique", x)
-}
-
-#' `match` and `%in%` for Arrow objects
-#'
-#' `base::match()` is not a generic, so we can't just define Arrow methods for
-#' it. These functions expose the analogous functions in the Arrow C++ library.
-#'
-#' @param x `Array` or `ChunkedArray`
-#' @param table `Array`, `ChunkedArray`, or R vector lookup table.
-#' @param ... additional arguments, ignored
-#' @return `match_arrow()` returns an `int32`-type `Array` of the same length
-#' as `x` with the (0-based) indexes into `table`. `is_in()` returns a
-#' `boolean`-type `Array` of the same length as `x` with values indicating
-#' per element of `x` whether it is present in `table`.
-#' @export
-match_arrow <- function(x, table, ...) UseMethod("match_arrow")
-
-#' @export
-match_arrow.default <- function(x, table, ...) match(x, table, ...)
-
-#' @export
-match_arrow.ArrowDatum <- function(x, table, ...) {
-  if (!inherits(table, c("Array", "ChunkedArray"))) {
-    table <- Array$create(table)
-  }
-  call_function("index_in_meta_binary", x, table)
-}
-
-#' @rdname match_arrow
-#' @export
-is_in <- function(x, table, ...) UseMethod("is_in")
-
-#' @export
-is_in.default <- function(x, table, ...) x %in% table
-
-#' @export
-is_in.ArrowDatum <- function(x, table, ...) {
-  if (!inherits(table, c("Array", "DictionaryArray", "ChunkedArray"))) {
-    table <- Array$create(table)
-  }
-  call_function("is_in_meta_binary", x, table)
-}
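
A sketch of both helpers; note the 0-based indexes documented above:

    a <- Array$create(c("a", "b", "c"))
    match_arrow(a, c("c", "a"))  # int32 Array [1, null, 0]
    is_in(a, c("c", "a"))        # boolean Array [true, false, true]
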
-
-#' `table` for Arrow objects
-#'
-#' This function tabulates the values in the array and returns a table of counts.
-#' @param x `Array` or `ChunkedArray`
-#' @return A `StructArray` containing "values" (same type as `x`) and "counts"
-#' `Int64`.
-#' @export
-value_counts <- function(x) {
-  call_function("value_counts", x)
-}
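
For example:

    value_counts(Array$create(c("a", "b", "a")))
    # StructArray with values ["a", "b"] and int64 counts [2, 1]
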
-
-#' Cast options
-#'
-#' @param safe logical: enforce safe conversion? Default `TRUE`
-#' @param ... additional cast options, such as `allow_int_overflow`,
-#' `allow_time_truncate`, and `allow_float_truncate`, which are set to `!safe`
-#' by default
-#' @return A list
-#' @export
-#' @keywords internal
-cast_options <- function(safe = TRUE, ...) {
-  opts <- list(
-    allow_int_overflow = !safe,
-    allow_time_truncate = !safe,
-    allow_float_truncate = !safe
-  )
-  modifyList(opts, list(...))
-}
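
A quick sketch of the two ways to use it:

    cast_options(safe = FALSE)  # all three truncate/overflow flags become TRUE
    cast_options(safe = TRUE, allow_int_overflow = TRUE)  # override one flag via ...
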
diff --git a/r/R/config.R b/r/R/config.R
deleted file mode 100644
index 301d0fa..0000000
--- a/r/R/config.R
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Manage the global CPU thread pool in libarrow
-#'
-#' @export
-cpu_count <- function() {
-  GetCpuThreadPoolCapacity()
-}
-
-#' @rdname cpu_count
-#' @param num_threads integer: New number of threads for thread pool
-#' @export
-set_cpu_count <- function(num_threads) {
-  SetCpuThreadPoolCapacity(as.integer(num_threads))
-}
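
Usage sketch (e.g. to limit parallelism during tests):

    old <- cpu_count()
    set_cpu_count(2L)
    cpu_count()  # 2
    set_cpu_count(old)
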
diff --git a/r/R/csv.R b/r/R/csv.R
deleted file mode 100644
index 160c46e..0000000
--- a/r/R/csv.R
+++ /dev/null
@@ -1,587 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Read a CSV or other delimited file with Arrow
-#'
-#' These functions use the Arrow C++ CSV reader to read a delimited file into a `data.frame`.
-#' Arrow C++ options have been mapped to argument names that follow those of
-#' `readr::read_delim()`, and `col_select` was inspired by `vroom::vroom()`.
-#'
-#' `read_csv_arrow()` and `read_tsv_arrow()` are wrappers around
-#' `read_delim_arrow()` that specify a delimiter.
-#'
-#' Note that not all `readr` options are currently implemented here. Please file
-#' an issue if you encounter one that `arrow` should support.
-#'
-#' If you need to control Arrow-specific reader parameters that don't have an
-#' equivalent in `readr::read_csv()`, you can either provide them in the
-#' `parse_options`, `convert_options`, or `read_options` arguments, or you can
-#' use [CsvTableReader] directly for lower-level access.
-#'
-#' @section Specifying column types and names:
-#'
-#' By default, the CSV reader will infer the column names and data types from the file, but there
-#' are a few ways you can specify them directly.
-#'
-#' One way is to provide an Arrow [Schema] in the `schema` argument,
-#' which is an ordered map of column name to type.
-#' When provided, it satisfies both the `col_names` and `col_types` arguments.
-#' This is good if you know all of this information up front.
-#'
-#' You can also pass a `Schema` to the `col_types` argument. If you do this,
-#' column names will still be inferred from the file unless you also specify
-#' `col_names`. In either case, the column names in the `Schema` must match the
-#' data's column names, whether they are explicitly provided or inferred. That
-#' said, this `Schema` does not have to reference all columns: those omitted
-#' will have their types inferred.
-#'
-#' Alternatively, you can declare column types by providing the compact string representation
-#' that `readr` uses to the `col_types` argument. This means you provide a
-#' single string, one character per column, where the characters map to Arrow
-#' types analogously to the `readr` type mapping:
-#'
-#' * "c": `utf8()`
-#' * "i": `int32()`
-#' * "n": `float64()`
-#' * "d": `float64()`
-#' * "l": `bool()`
-#' * "f": `dictionary()`
-#' * "D": `date32()`
-#' * "T": `time32()`
-#' * "t": `timestamp()`
-#' * "_": `null()`
-#' * "-": `null()`
-#' * "?": infer the type from the data
-#'
-#' If you use the compact string representation for `col_types`, you must also
-#' specify `col_names`.
-#'
-#' Regardless of how types are specified, all columns with a `null()` type will
-#' be dropped.
-#'
-#' Note that if you are specifying column names, whether by `schema` or
-#' `col_names`, and the CSV file has a header row that would otherwise be used
-#' to identify column names, you'll need to add `skip = 1` to skip that row.
-#'
-#' @param file A character file name or URI, `raw` vector, an Arrow input stream,
-#' or a `FileSystem` with path (`SubTreeFileSystem`).
-#' If a file name, a memory-mapped Arrow [InputStream] will be opened and
-#' closed when finished; compression will be detected from the file extension
-#' and handled automatically. If an input stream is provided, it will be left
-#' open.
-#' @param delim Single character used to separate fields within a record.
-#' @param quote Single character used to quote strings.
-#' @param escape_double Does the file escape quotes by doubling them?
-#' i.e. If this option is `TRUE`, the value `""""` represents
-#' a single quote, `\"`.
-#' @param escape_backslash Does the file use backslashes to escape special
-#' characters? This is more general than `escape_double` as backslashes
-#' can be used to escape the delimiter character, the quote character, or
-#' to add special characters like `\\n`.
-#' @param schema [Schema] that describes the table. If provided, it will be
-#' used to satisfy both `col_names` and `col_types`.
-#' @param col_names If `TRUE`, the first row of the input will be used as the
-#' column names and will not be included in the data frame. If `FALSE`, column
-#' names will be generated by Arrow, starting with "f0", "f1", ..., "fN".
-#' Alternatively, you can specify a character vector of column names.
-#' @param col_types A compact string representation of the column types, or
-#' `NULL` (the default) to infer types from the data.
-#' @param col_select A character vector of column names to keep, as in the
-#' "select" argument to `data.table::fread()`, or a
-#' [tidy selection specification][tidyselect::vars_select()]
-#' of columns, as used in `dplyr::select()`.
-#' @param na A character vector of strings to interpret as missing values.
-#' @param quoted_na Should missing values inside quotes be treated as missing
-#' values (the default) or strings. (Note that this is different from the
-#' Arrow C++ default for the corresponding convert option,
-#' `strings_can_be_null`.)
-#' @param skip_empty_rows Should blank rows be ignored altogether? If
-#' `TRUE`, blank rows will not be represented at all. If `FALSE`, they will be
-#' filled with missing values.
-#' @param skip Number of lines to skip before reading data.
-#' @param timestamp_parsers User-defined timestamp parsers. If more than one
-#' parser is specified, the CSV conversion logic will try parsing values
-#' starting from the beginning of this vector. Possible values are:
-#'  - `NULL`: the default, which uses the ISO-8601 parser
-#'  - a character vector of [strptime][base::strptime()] parse strings
-#'  - a list of [TimestampParser] objects
-#' @param parse_options see [file reader options][CsvReadOptions].
-#' If given, this overrides any
-#' parsing options provided in other arguments (e.g. `delim`, `quote`, etc.).
-#' @param convert_options see [file reader options][CsvReadOptions]
-#' @param read_options see [file reader options][CsvReadOptions]
-#' @param as_data_frame Should the function return a `data.frame` (default) or
-#' an Arrow [Table]?
-#'
-#' @return A `data.frame`, or a Table if `as_data_frame = FALSE`.
-#' @export
-#' @examples
-#' \donttest{
-#'   tf <- tempfile()
-#'   on.exit(unlink(tf))
-#'   write.csv(mtcars, file = tf)
-#'   df <- read_csv_arrow(tf)
-#'   dim(df)
-#'   # Can select columns
-#'   df <- read_csv_arrow(tf, col_select = starts_with("d"))
-#' }
-read_delim_arrow <- function(file,
-                             delim = ",",
-                             quote = '"',
-                             escape_double = TRUE,
-                             escape_backslash = FALSE,
-                             schema = NULL,
-                             col_names = TRUE,
-                             col_types = NULL,
-                             col_select = NULL,
-                             na = c("", "NA"),
-                             quoted_na = TRUE,
-                             skip_empty_rows = TRUE,
-                             skip = 0L,
-                             parse_options = NULL,
-                             convert_options = NULL,
-                             read_options = NULL,
-                             as_data_frame = TRUE,
-                             timestamp_parsers = NULL) {
-  if (inherits(schema, "Schema")) {
-    col_names <- names(schema)
-    col_types <- schema
-  }
-  if (is.null(parse_options)) {
-    parse_options <- readr_to_csv_parse_options(
-      delim,
-      quote,
-      escape_double,
-      escape_backslash,
-      skip_empty_rows
-    )
-  }
-  if (is.null(read_options)) {
-    read_options <- readr_to_csv_read_options(skip, col_names)
-  }
-  if (is.null(convert_options)) {
-    convert_options <- readr_to_csv_convert_options(
-      na,
-      quoted_na,
-      col_types = col_types,
-      col_names = read_options$column_names,
-      timestamp_parsers = timestamp_parsers
-    )
-  }
-
-  if (!inherits(file, "InputStream")) {
-    file <- make_readable_file(file)
-    on.exit(file$close())
-  }
-  reader <- CsvTableReader$create(
-    file,
-    read_options = read_options,
-    parse_options = parse_options,
-    convert_options = convert_options
-  )
-
-  tab <- reader$Read()
-
-  # TODO: move this into convert_options using include_columns
-  col_select <- enquo(col_select)
-  if (!quo_is_null(col_select)) {
-    tab <- tab[vars_select(names(tab), !!col_select)]
-  }
-
-  if (isTRUE(as_data_frame)) {
-    tab <- as.data.frame(tab)
-  }
-
-  tab
-}
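
A sketch of the compact `col_types` specification documented above; the file and column names are illustrative, and `skip = 1` drops the header row because `col_names` is supplied explicitly:

    df <- read_delim_arrow(
      "data.csv", delim = ",",               # hypothetical file
      col_names = c("id", "score", "label"),
      col_types = "idc",                     # int32, float64, utf8
      skip = 1
    )
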
-
-#' @rdname read_delim_arrow
-#' @export
-read_csv_arrow <- function(file,
-                           quote = '"',
-                           escape_double = TRUE,
-                           escape_backslash = FALSE,
-                           schema = NULL,
-                           col_names = TRUE,
-                           col_types = NULL,
-                           col_select = NULL,
-                           na = c("", "NA"),
-                           quoted_na = TRUE,
-                           skip_empty_rows = TRUE,
-                           skip = 0L,
-                           parse_options = NULL,
-                           convert_options = NULL,
-                           read_options = NULL,
-                           as_data_frame = TRUE,
-                           timestamp_parsers = NULL) {
-
-  mc <- match.call()
-  mc$delim <- ","
-  mc[[1]] <- get("read_delim_arrow", envir = asNamespace("arrow"))
-  eval.parent(mc)
-}
-
-#' @rdname read_delim_arrow
-#' @export
-read_tsv_arrow <- function(file,
-                           quote = '"',
-                           escape_double = TRUE,
-                           escape_backslash = FALSE,
-                           schema = NULL,
-                           col_names = TRUE,
-                           col_types = NULL,
-                           col_select = NULL,
-                           na = c("", "NA"),
-                           quoted_na = TRUE,
-                           skip_empty_rows = TRUE,
-                           skip = 0L,
-                           parse_options = NULL,
-                           convert_options = NULL,
-                           read_options = NULL,
-                           as_data_frame = TRUE,
-                           timestamp_parsers = NULL) {
-
-  mc <- match.call()
-  mc$delim <- "\t"
-  mc[[1]] <- get("read_delim_arrow", envir = asNamespace("arrow"))
-  eval.parent(mc)
-}
-
-#' @title Arrow CSV and JSON table reader classes
-#' @rdname CsvTableReader
-#' @name CsvTableReader
-#' @docType class
-#' @usage NULL
-#' @format NULL
-#' @description `CsvTableReader` and `JsonTableReader` wrap the Arrow C++ CSV
-#' and JSON table readers. See their usage in [read_csv_arrow()] and
-#' [read_json_arrow()], respectively.
-#'
-#' @section Factory:
-#'
-#' The `CsvTableReader$create()` and `JsonTableReader$create()` factory methods
-#' take the following arguments:
-#'
-#' - `file` An Arrow [InputStream]
-#' - `convert_options` (CSV only), `parse_options`, `read_options`: see
-#'    [CsvReadOptions]
-#' - `...` additional parameters.
-#'
-#' @section Methods:
-#'
-#' - `$Read()`: returns an Arrow Table.
-#'
-#' @include arrow-package.R
-#' @export
-CsvTableReader <- R6Class("CsvTableReader", inherit = ArrowObject,
-  public = list(
-    Read = function() csv___TableReader__Read(self)
-  )
-)
-CsvTableReader$create <- function(file,
-                                  read_options = CsvReadOptions$create(),
-                                  parse_options = CsvParseOptions$create(),
-                                  convert_options = CsvConvertOptions$create(),
-                                  ...) {
-  assert_is(file, "InputStream")
-  csv___TableReader__Make(file, read_options, parse_options, convert_options)
-}
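
Lower-level usage sketch for the reader class above (the path is illustrative):

    stream <- ReadableFile$create("data.csv")
    reader <- CsvTableReader$create(stream)
    tab <- reader$Read()  # an Arrow Table
    stream$close()
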
-
-#' @title File reader options
-#' @rdname CsvReadOptions
-#' @name CsvReadOptions
-#' @docType class
-#' @usage NULL
-#' @format NULL
-#' @description `CsvReadOptions`, `CsvParseOptions`, `CsvConvertOptions`,
-#' `JsonReadOptions`, `JsonParseOptions`, and `TimestampParser` are containers for various
-#' file reading options. See their usage in [read_csv_arrow()] and
-#' [read_json_arrow()], respectively.
-#'
-#' @section Factory:
-#'
-#' The `CsvReadOptions$create()` and `JsonReadOptions$create()` factory methods
-#' take the following arguments:
-#'
-#' - `use_threads` Whether to use the global CPU thread pool
-#' - `block_size` Block size we request from the IO layer; also determines
-#' the size of chunks when use_threads is `TRUE`. NB: if `FALSE`, JSON input
-#' must end with an empty line.
-#'
-#' `CsvReadOptions$create()` further accepts these additional arguments:
-#'
-#' - `skip_rows` Number of lines to skip before reading data (default 0)
-#' - `column_names` Character vector to supply column names. If length-0
-#' (the default), the first non-skipped row will be parsed to generate column
-#' names, unless `autogenerate_column_names` is `TRUE`.
-#' - `autogenerate_column_names` Logical: generate column names instead of
-#' using the first non-skipped row (the default)? If `TRUE`, column names will
-#' be "f0", "f1", ..., "fN".
-#'
-#' `CsvParseOptions$create()` takes the following arguments:
-#'
-#' - `delimiter` Field delimiting character (default `","`)
-#' - `quoting` Logical: are strings quoted? (default `TRUE`)
-#' - `quote_char` Quoting character, if `quoting` is `TRUE`
-#' - `double_quote` Logical: are quotes inside values double-quoted? (default `TRUE`)
-#' - `escaping` Logical: whether escaping is used (default `FALSE`)
-#' - `escape_char` Escaping character, if `escaping` is `TRUE`
-#' - `newlines_in_values` Logical: are values allowed to contain CR (`0x0d`)
-#'    and LF (`0x0a`) characters? (default `FALSE`)
-#' - `ignore_empty_lines` Logical: should empty lines be ignored (default) or
-#'    generate a row of missing values (if `FALSE`)?
-#'
-#' `JsonParseOptions$create()` accepts only the `newlines_in_values` argument.
-#'
-#' `CsvConvertOptions$create()` takes the following arguments:
-#'
-#' - `check_utf8` Logical: check UTF8 validity of string columns? (default `TRUE`)
-#' - `null_values` character vector of recognized spellings for null values.
-#'    Analogous to the `na.strings` argument to
-#'    [`read.csv()`][utils::read.csv()] or `na` in `readr::read_csv()`.
-#' - `strings_can_be_null` Logical: can string / binary columns have
-#'    null values? Similar to the `quoted_na` argument to `readr::read_csv()`.
-#'    (default `FALSE`)
-#' - `true_values` character vector of recognized spellings for `TRUE` values
-#' - `false_values` character vector of recognized spellings for `FALSE` values
-#' - `col_types` A `Schema` or `NULL` to infer types
-#' - `auto_dict_encode` Logical: Whether to try to automatically
-#'    dictionary-encode string / binary data (think `stringsAsFactors`). Default `FALSE`.
-#'    This setting is ignored for non-inferred columns (those in `col_types`).
-#' - `auto_dict_max_cardinality` If `auto_dict_encode`, string/binary columns
-#'    are dictionary-encoded up to this number of unique values (default 50),
-#'    after which it switches to regular encoding.
-#' - `include_columns` If non-empty, indicates the names of columns from the
-#'    CSV file that should actually be read and converted (in the vector's order).
-#' - `include_missing_columns` Logical: if `include_columns` is provided, should
-#'    columns named in it but not found in the data be included as a column of
-#'    type `null()`? The default (`FALSE`) means that the reader will instead
-#'    raise an error.
-#' - `timestamp_parsers` User-defined timestamp parsers. If more than one
-#'    parser is specified, the CSV conversion logic will try parsing values
-#'    starting from the beginning of this vector. Possible values are
-#'    (a) `NULL`, the default, which uses the ISO-8601 parser;
-#'    (b) a character vector of [strptime][base::strptime()] parse strings; or
-#'    (c) a list of [TimestampParser] objects.
-#'
-#' `TimestampParser$create()` takes an optional `format` string argument.
-#' See [`strptime()`][base::strptime()] for example syntax.
-#' The default is to use an ISO-8601 format parser.
-#' @section Active bindings:
-#'
-#' - `column_names`: from `CsvReadOptions`
-#'
-#' @export
-CsvReadOptions <- R6Class("CsvReadOptions",
-  inherit = ArrowObject,
-  active = list(
-    column_names = function() csv___ReadOptions__column_names(self)
-  )
-)
-CsvReadOptions$create <- function(use_threads = option_use_threads(),
-                                  block_size = 1048576L,
-                                  skip_rows = 0L,
-                                  column_names = character(0),
-                                  autogenerate_column_names = FALSE) {
-  csv___ReadOptions__initialize(
-    list(
-      use_threads = use_threads,
-      block_size = block_size,
-      skip_rows = skip_rows,
-      column_names = column_names,
-      autogenerate_column_names = autogenerate_column_names
-    )
-  )
-}
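
For example:

    opts <- CsvReadOptions$create(skip_rows = 1L, column_names = c("a", "b"))
    opts$column_names  # c("a", "b"), via the active binding above
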
-
-readr_to_csv_read_options <- function(skip, col_names, col_types) {
-  if (isTRUE(col_names)) {
-    # C++ default to parse is 0-length string array
-    col_names <- character(0)
-  }
-  if (identical(col_names, FALSE)) {
-    CsvReadOptions$create(skip_rows = skip, autogenerate_column_names = TRUE)
-  } else {
-    CsvReadOptions$create(skip_rows = skip, column_names = col_names)
-  }
-}
-
-#' @rdname CsvReadOptions
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @export
-CsvParseOptions <- R6Class("CsvParseOptions", inherit = ArrowObject)
-CsvParseOptions$create <- function(delimiter = ",",
-                                   quoting = TRUE,
-                                   quote_char = '"',
-                                   double_quote = TRUE,
-                                   escaping = FALSE,
-                                   escape_char = '\\',
-                                   newlines_in_values = FALSE,
-                                   ignore_empty_lines = TRUE) {
-
-  csv___ParseOptions__initialize(
-    list(
-      delimiter = delimiter,
-      quoting = quoting,
-      quote_char = quote_char,
-      double_quote = double_quote,
-      escaping = escaping,
-      escape_char = escape_char,
-      newlines_in_values = newlines_in_values,
-      ignore_empty_lines = ignore_empty_lines
-    )
-  )
-}
-
-readr_to_csv_parse_options <- function(delim = ",",
-                                       quote = '"',
-                                       escape_double = TRUE,
-                                       escape_backslash = FALSE,
-                                       skip_empty_rows = TRUE) {
-  # This function translates from the readr argument list to the arrow arg names
-  # TODO: validate inputs
-  CsvParseOptions$create(
-    delimiter = delim,
-    quoting = nzchar(quote),
-    quote_char = quote,
-    double_quote = escape_double,
-    escaping = escape_backslash,
-    escape_char = '\\',
-    newlines_in_values = escape_backslash,
-    ignore_empty_lines = skip_empty_rows
-  )
-}
-
-#' @rdname CsvReadOptions
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @export
-TimestampParser <- R6Class("TimestampParser", inherit = ArrowObject,
-  public = list(
-    kind = function() TimestampParser__kind(self),
-    format = function() TimestampParser__format(self)
-  )
-)
-TimestampParser$create <- function(format = NULL) {
-  if (is.null(format)) {
-    TimestampParser__MakeISO8601()
-  } else {
-    TimestampParser__MakeStrptime(format)
-  }
-}
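
Sketch of a strptime-style parser handed to the CSV reader via `timestamp_parsers` (the format string and file are illustrative):

    p <- TimestampParser$create("%m/%d/%Y")
    p$format()  # "%m/%d/%Y"
    df <- read_csv_arrow("dates.csv", timestamp_parsers = list(p))
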
-
-#' @rdname CsvReadOptions
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @export
-CsvConvertOptions <- R6Class("CsvConvertOptions", inherit = ArrowObject)
-CsvConvertOptions$create <- function(check_utf8 = TRUE,
-                                     null_values = c("", "NA"),
-                                     true_values = c("T", "true", "TRUE"),
-                                     false_values = c("F", "false", "FALSE"),
-                                     strings_can_be_null = FALSE,
-                                     col_types = NULL,
-                                     auto_dict_encode = FALSE,
-                                     auto_dict_max_cardinality = 50L,
-                                     include_columns = character(),
-                                     include_missing_columns = FALSE,
-                                     timestamp_parsers = NULL) {
-
-  if (!is.null(col_types) && !inherits(col_types, "Schema")) {
-    abort(c(
-      "Unsupported `col_types` specification.",
-      i = "`col_types` must be NULL, or a <Schema>."
-    ))
-  }
-
-  csv___ConvertOptions__initialize(
-    list(
-      check_utf8 = check_utf8,
-      null_values = null_values,
-      strings_can_be_null = strings_can_be_null,
-      col_types = col_types,
-      true_values = true_values,
-      false_values = false_values,
-      auto_dict_encode = auto_dict_encode,
-      auto_dict_max_cardinality = auto_dict_max_cardinality,
-      include_columns = include_columns,
-      include_missing_columns = include_missing_columns,
-      timestamp_parsers = timestamp_parsers
-    )
-  )
-}
-
-readr_to_csv_convert_options <- function(na,
-                                         quoted_na,
-                                         col_types = NULL,
-                                         col_names = NULL,
-                                         timestamp_parsers = NULL) {
-  include_columns <- character()
-
-  if (is.character(col_types)) {
-    if (length(col_types) != 1L) {
-      abort("`col_types` must be a character vector of size 1")
-    }
-    n <- nchar(col_types)
-    specs <- substring(col_types, seq_len(n), seq_len(n))
-    if (!is_bare_character(col_names, n)) {
-      abort("Compact specification for `col_types` requires `col_names`")
-    }
-
-    col_types <- set_names(nm = col_names, map2(specs, col_names, ~{
-      switch(.x,
-             "c" = utf8(),
-             "i" = int32(),
-             "n" = float64(),
-             "d" = float64(),
-             "l" = bool(),
-             "f" = dictionary(),
-             "D" = date32(),
-             "T" = time32(),
-             "t" = timestamp(),
-             "_" = null(),
-             "-" = null(),
-             "?" = NULL,
-             abort(paste0("Unsupported compact specification: '", .x, "' for column '", .y, "'"))
-      )
-    }))
-    # To "guess" types, omit them from col_types
-    col_types <- keep(col_types, ~!is.null(.x))
-    col_types <- schema(!!!col_types)
-  }
-
-  if (!is.null(col_types)) {
-    assert_is(col_types, "Schema")
-    # If any columns are null(), drop them
-    # (by specifying the other columns in include_columns)
-    nulls <- map_lgl(col_types$fields, ~.$type$Equals(null()))
-    if (any(nulls)) {
-      include_columns <- setdiff(col_names, names(col_types)[nulls])
-    }
-  }
-  CsvConvertOptions$create(
-    null_values = na,
-    strings_can_be_null = quoted_na,
-    col_types = col_types,
-    timestamp_parsers = timestamp_parsers,
-    include_columns = include_columns
-  )
-}
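A sketch of the compact `col_types` handling above (column names are hypothetical):

    # Each character expands to a type: c = utf8(), i = int32(), d = float64()
    opts <- readr_to_csv_convert_options(
      na = c("", "NA"),
      quoted_na = TRUE,
      col_types = "cid",
      col_names = c("name", "count", "score")
    )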
diff --git a/r/R/dataset-factory.R b/r/R/dataset-factory.R
deleted file mode 100644
index 0e029cb..0000000
--- a/r/R/dataset-factory.R
+++ /dev/null
@@ -1,169 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include dataset.R
-
-#' @usage NULL
-#' @format NULL
-#' @rdname Dataset
-#' @export
-DatasetFactory <- R6Class("DatasetFactory", inherit = ArrowObject,
-  public = list(
-    Finish = function(schema = NULL, unify_schemas = FALSE) {
-      if (is.null(schema)) {
-        dataset___DatasetFactory__Finish1(self, unify_schemas)
-      } else {
-        assert_is(schema, "Schema")
-        dataset___DatasetFactory__Finish2(self, schema)
-      }
-    },
-    Inspect = function(unify_schemas = FALSE) {
-      dataset___DatasetFactory__Inspect(self, unify_schemas)
-    }
-  )
-)
-DatasetFactory$create <- function(x,
-                                  filesystem = NULL,
-                                  format = c("parquet", "arrow", "ipc", "feather", "csv", "tsv", "text"),
-                                  partitioning = NULL,
-                                  ...) {
-  if (is_list_of(x, "DatasetFactory")) {
-    return(dataset___UnionDatasetFactory__Make(x))
-  }
-
-  if (is.character(format)) {
-    format <- FileFormat$create(match.arg(format), ...)
-  } else {
-    assert_is(format, "FileFormat")
-  }
-
-  path_and_fs <- get_paths_and_filesystem(x, filesystem)
-  info <- path_and_fs$fs$GetFileInfo(path_and_fs$path)
-
-  if (length(info) > 1 || info[[1]]$type == FileType$File) {
-    # x looks like a vector of one or more file paths (not a directory path)
-    return(FileSystemDatasetFactory$create(path_and_fs$fs, NULL, path_and_fs$path, format))
-  }
-
-  if (!is.null(partitioning)) {
-    if (inherits(partitioning, "Schema")) {
-      partitioning <- DirectoryPartitioning$create(partitioning)
-    } else if (is.character(partitioning)) {
-      # These are the column/field names, and we should autodetect their types
-      partitioning <- DirectoryPartitioningFactory$create(partitioning)
-    }
-  }
-
-  selector <- FileSelector$create(path_and_fs$path, allow_not_found = FALSE, recursive = TRUE)
-
-  FileSystemDatasetFactory$create(path_and_fs$fs, selector, NULL, format, partitioning)
-}
-
-#' Create a DatasetFactory
-#'
-#' A [Dataset] can be constructed using one or more [DatasetFactory]s.
-#' This function helps you construct a `DatasetFactory` that you can pass to
-#' [open_dataset()].
-#'
-#' If you only have a single `DatasetFactory` (for example, you have a
-#' single directory containing Parquet files), you can call `open_dataset()`
-#' directly. Use `dataset_factory()` when you
-#' want to combine different directories, file systems, or file formats.
-#'
-#' @param x A string path to a directory containing data files, a vector of
-#' one or more string paths to data files, or a list of `DatasetFactory` objects
-#' whose datasets should be combined. If this argument is specified it will be
-#' used to construct a `UnionDatasetFactory` and other arguments will be
-#' ignored.
-#' @param filesystem A [FileSystem] object; if omitted, the `FileSystem` will
-#' be detected from `x`
-#' @param format A [FileFormat] object, or a string identifier of the format of
-#' the files in `x`. Currently supported values:
-#' * "parquet"
-#' * "ipc"/"arrow"/"feather", all aliases for each other; for Feather, note that
-#'   only version 2 files are supported
-#' * "csv"/"text", aliases for the same thing (because comma is the default
-#'   delimiter for text files)
-#' * "tsv", equivalent to passing `format = "text", delimiter = "\t"`
-#'
-#' Default is "parquet", unless a `delimiter` is also specified, in which case
-#' it is assumed to be "text".
-#' @param partitioning One of
-#'   * A `Schema`, in which case the file paths relative to `sources` will be
-#'    parsed, and path segments will be matched with the schema fields. For
-#'    example, `schema(year = int16(), month = int8())` would create partitions
-#'    for file paths like "2019/01/file.parquet", "2019/02/file.parquet", etc.
-#'   * A character vector that defines the field names corresponding to those
-#'    path segments (that is, you're providing the names that would correspond
-#'    to a `Schema` but the types will be autodetected)
-#'   * A `HivePartitioning` or `HivePartitioningFactory`, as returned
-#'    by [hive_partition()] which parses explicit or autodetected fields from
-#'    Hive-style path segments
-#'   * `NULL` for no partitioning
-#' @param ... Additional format-specific options, passed to
-#' `FileFormat$create()`. For CSV options, note that you can specify them either
-#' with the Arrow C++ library naming ("delimiter", "quoting", etc.) or the
-#' `readr`-style naming used in [read_csv_arrow()] ("delim", "quote", etc.).
-#' Not all `readr` options are currently supported; please file an issue if you
-#' encounter one that `arrow` should support.
-#' @return A `DatasetFactory` object. Pass this to [open_dataset()],
-#' in a list potentially with other `DatasetFactory` objects, to create
-#' a `Dataset`.
-#' @export
-dataset_factory <- DatasetFactory$create
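A hedged usage sketch of `dataset_factory()` as documented above (paths are hypothetical):

    # Combine two directories of different formats into one Dataset
    factories <- list(
      dataset_factory("data/parquet-part", format = "parquet"),
      dataset_factory("data/csv-part", format = "csv")
    )
    ds <- open_dataset(factories)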
-
-#' @usage NULL
-#' @format NULL
-#' @rdname Dataset
-#' @export
-FileSystemDatasetFactory <- R6Class("FileSystemDatasetFactory",
-  inherit = DatasetFactory
-)
-FileSystemDatasetFactory$create <- function(filesystem,
-                                            selector = NULL,
-                                            paths = NULL,
-                                            format,
-                                            partitioning = NULL) {
-  assert_is(filesystem, "FileSystem")
-  is.null(selector) || assert_is(selector, "FileSelector")
-  is.null(paths) || assert_is(paths, "character")
-  assert_that(
-    xor(is.null(selector), is.null(paths)),
-    msg = "Either selector or paths must be specified"
-  )
-  assert_is(format, "FileFormat")
-  if (!is.null(paths)) {
-    assert_that(is.null(partitioning), msg = "Partitioning not supported with paths")
-  }
-
-  if (!is.null(paths)) {
-    ptr <- dataset___FileSystemDatasetFactory__Make0(filesystem, paths, format)
-  } else if (is.null(partitioning)) {
-    ptr <- dataset___FileSystemDatasetFactory__Make1(filesystem, selector, format)
-  } else if (inherits(partitioning, "PartitioningFactory")) {
-    ptr <- dataset___FileSystemDatasetFactory__Make3(filesystem, selector, format, partitioning)
-  } else if (inherits(partitioning, "Partitioning")) {
-    ptr <- dataset___FileSystemDatasetFactory__Make2(filesystem, selector, format, partitioning)
-  } else {
-    stop(
-      "Expected 'partitioning' to be NULL, PartitioningFactory or Partitioning",
-      call. = FALSE
-    )
-  }
-
-  ptr
-}
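To illustrate the selector/paths exclusivity asserted above (a sketch; the filesystem constructor and paths are assumptions):

    fs <- LocalFileSystem$create()
    fmt <- FileFormat$create("parquet")
    # Either explicit file paths...
    FileSystemDatasetFactory$create(fs, paths = c("a.parquet", "b.parquet"), format = fmt)
    # ...or a recursive directory selector, but never both
    sel <- FileSelector$create("data-dir", recursive = TRUE)
    FileSystemDatasetFactory$create(fs, selector = sel, format = fmt)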
diff --git a/r/R/dataset-format.R b/r/R/dataset-format.R
deleted file mode 100644
index 854672b..0000000
--- a/r/R/dataset-format.R
+++ /dev/null
@@ -1,320 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Dataset file formats
-#'
-#' @description
-#' A `FileFormat` holds information about how to read and parse the files
-#' included in a `Dataset`. There are subclasses corresponding to the supported
-#' file formats (`ParquetFileFormat` and `IpcFileFormat`).
-#'
-#' @section Factory:
-#' `FileFormat$create()` takes the following arguments:
-#' * `format`: A string identifier of the file format. Currently supported values:
-#'   * "parquet"
-#'   * "ipc"/"arrow"/"feather", all aliases for each other; for Feather, note that
-#'     only version 2 files are supported
-#'   * "csv"/"text", aliases for the same thing (because comma is the default
-#'     delimiter for text files)
-#'   * "tsv", equivalent to passing `format = "text", delimiter = "\t"`
-#' * `...`: Additional format-specific options
-#'
-#'   `format = "parquet"`:
-#'   * `dict_columns`: Names of columns which should be read as dictionaries.
-#'   * Any Parquet options from [FragmentScanOptions].
-#'
-#'   `format = "text"`: see [CsvParseOptions]. Note that you can specify them either
-#'   with the Arrow C++ library naming ("delimiter", "quoting", etc.) or the
-#'   `readr`-style naming used in [read_csv_arrow()] ("delim", "quote", etc.).
-#'   Not all `readr` options are currently supported; please file an issue if
-#'   you encounter one that `arrow` should support. Also, the following options are
-#'   supported. From [CsvReadOptions]:
-#'   * `skip_rows`
-#'   * `column_names`
-#'   * `autogenerate_column_names`
-#'   From [CsvFragmentScanOptions] (these values can be overridden at scan time):
-#'   * `convert_options`: a [CsvConvertOptions]
-#'   * `block_size`
-#'
-#' It returns the appropriate subclass of `FileFormat` (e.g. `ParquetFileFormat`)
-#' @rdname FileFormat
-#' @name FileFormat
-#' @export
-FileFormat <- R6Class("FileFormat", inherit = ArrowObject,
-  active = list(
-    # @description
-    # Return the `FileFormat`'s type
-    type = function() dataset___FileFormat__type_name(self)
-  )
-)
-FileFormat$create <- function(format, ...) {
-  opt_names <- names(list(...))
-  if (format %in% c("csv", "text") || any(opt_names %in% c("delim", "delimiter"))) {
-    CsvFileFormat$create(...)
-  } else if (format == "tsv") {
-    CsvFileFormat$create(delimiter = "\t", ...)
-  } else if (format == "parquet") {
-    ParquetFileFormat$create(...)
-  } else if (format %in% c("ipc", "arrow", "feather")) { # These are aliases for the same thing
-    dataset___IpcFileFormat__Make()
-  } else {
-    stop("Unsupported file format: ", format, call. = FALSE)
-  }
-}
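A brief sketch of the dispatch above (option values are hypothetical):

    FileFormat$create("parquet", dict_columns = "species")
    FileFormat$create("csv", delimiter = "|")
    FileFormat$create("tsv")      # same as format = "text" with a tab delimiter
    FileFormat$create("feather")  # alias for the IPC format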
-
-#' @export
-as.character.FileFormat <- function(x, ...) {
-  out <- x$type
-  # Slight hack: special case IPC -> feather, otherwise is just the type_name
-  ifelse(out == "ipc", "feather", out)
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FileFormat
-#' @export
-ParquetFileFormat <- R6Class("ParquetFileFormat", inherit = FileFormat)
-ParquetFileFormat$create <- function(...,
-                                     dict_columns = character(0)) {
-  options <- ParquetFragmentScanOptions$create(...)
-  dataset___ParquetFileFormat__Make(options, dict_columns)
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FileFormat
-#' @export
-IpcFileFormat <- R6Class("IpcFileFormat", inherit = FileFormat)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FileFormat
-#' @export
-CsvFileFormat <- R6Class("CsvFileFormat", inherit = FileFormat)
-CsvFileFormat$create <- function(..., opts = csv_file_format_parse_options(...),
-                                 convert_options = csv_file_format_convert_options(...),
-                                 read_options = csv_file_format_read_options(...)) {
-  dataset___CsvFileFormat__Make(opts, convert_options, read_options)
-}
-
-# Support both readr-style option names and Arrow C++ option names
-csv_file_format_parse_options <- function(...) {
-  opts <- list(...)
-  # Filter out arguments meant for CsvConvertOptions/CsvReadOptions
-  convert_opts <- names(formals(CsvConvertOptions$create))
-  read_opts <- names(formals(CsvReadOptions$create))
-  opts[convert_opts] <- NULL
-  opts[read_opts] <- NULL
-  opt_names <- names(opts)
-  # Catch any readr-style options specified with full option names that are
-  # supported by read_delim_arrow() (and its wrappers) but are not yet
-  # supported here
-  unsup_readr_opts <- setdiff(
-    names(formals(read_delim_arrow)),
-    names(formals(readr_to_csv_parse_options))
-  )
-  is_unsup_opt <- opt_names %in% unsup_readr_opts
-  unsup_opts <- opt_names[is_unsup_opt]
-  if (length(unsup_opts)) {
-    stop(
-      "The following ",
-      ngettext(length(unsup_opts), "option is ", "options are "),
-      "supported in \"read_delim_arrow\" functions ",
-      "but not yet supported here: ",
-      oxford_paste(unsup_opts),
-      call. = FALSE
-    )
-  }
-  # Catch any options with full or partial names that do not match any of the
-  # recognized Arrow C++ option names or readr-style option names
-  arrow_opts <- names(formals(CsvParseOptions$create))
-  readr_opts <- names(formals(readr_to_csv_parse_options))
-  is_arrow_opt <- !is.na(pmatch(opt_names, arrow_opts))
-  is_readr_opt <- !is.na(pmatch(opt_names, readr_opts))
-  unrec_opts <- opt_names[!is_arrow_opt & !is_readr_opt]
-  if (length(unrec_opts)) {
-    stop(
-      "Unrecognized ",
-      ngettext(length(unrec_opts), "option", "options"),
-      ": ",
-      oxford_paste(unrec_opts),
-      call. = FALSE
-    )
-  }
-  # Catch options with ambiguous partial names (such as "del") that make it
-  # unclear whether the user is specifying Arrow C++ options ("delimiter") or
-  # readr-style options ("delim")
-  is_ambig_opt <- is.na(pmatch(opt_names, c(arrow_opts, readr_opts)))
-  ambig_opts <- opt_names[is_ambig_opt]
-  if (length(ambig_opts)) {
-    stop("Ambiguous ",
-         ngettext(length(ambig_opts), "option", "options"),
-         ": ",
-         oxford_paste(ambig_opts),
-         ". Use full argument names",
-         call. = FALSE)
-  }
-  if (any(is_readr_opt)) {
-    # Catch cases when the user specifies a mix of Arrow C++ options and
-    # readr-style options
-    if (!all(is_readr_opt)) {
-      stop("Use either Arrow parse options or readr parse options, not both",
-           call. = FALSE)
-    }
-    do.call(readr_to_csv_parse_options, opts) # all options have readr-style names
-  } else {
-    do.call(CsvParseOptions$create, opts) # all options have Arrow C++ names
-  }
-}
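For example (hypothetical calls), the checks above reject these inputs:

    csv_file_format_parse_options(delim = ";", quoting = FALSE)  # error: mixes readr and Arrow names
    csv_file_format_parse_options(del = ";")                     # error: ambiguous partial name
    csv_file_format_parse_options(delimiter = ";")               # ok: full Arrow C++ name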
-
-csv_file_format_convert_options <- function(...) {
-  opts <- list(...)
-  # Filter out arguments meant for CsvParseOptions/CsvReadOptions
-  arrow_opts <- names(formals(CsvParseOptions$create))
-  readr_opts <- names(formals(readr_to_csv_parse_options))
-  read_opts <- names(formals(CsvReadOptions$create))
-  opts[arrow_opts] <- NULL
-  opts[readr_opts] <- NULL
-  opts[read_opts] <- NULL
-  do.call(CsvConvertOptions$create, opts)
-}
-
-csv_file_format_read_options <- function(...) {
-  opts <- list(...)
-  # Filter out arguments meant for CsvParseOptions/CsvConvertOptions
-  arrow_opts <- names(formals(CsvParseOptions$create))
-  readr_opts <- names(formals(readr_to_csv_parse_options))
-  convert_opts <- names(formals(CsvConvertOptions$create))
-  opts[arrow_opts] <- NULL
-  opts[readr_opts] <- NULL
-  opts[convert_opts] <- NULL
-  do.call(CsvReadOptions$create, opts)
-}
-
-#' Format-specific scan options
-#'
-#' @description
-#' A `FragmentScanOptions` holds options specific to a `FileFormat` and a scan
-#' operation.
-#'
-#' @section Factory:
-#' `FragmentScanOptions$create()` takes the following arguments:
-#' * `format`: A string identifier of the file format. Currently supported values:
-#'   * "parquet"
-#'   * "csv"/"text", aliases for the same format.
-#' * `...`: Additional format-specific options
-#'
-#'   `format = "parquet"`:
-#'   * `use_buffered_stream`: Read files through buffered input streams rather than
-#'                            loading entire row groups at once. This may be enabled
-#'                            to reduce memory overhead. Disabled by default.
-#'   * `buffer_size`: Size of buffered stream, if enabled. Default is 8KB.
-#'   * `pre_buffer`: Pre-buffer the raw Parquet data. This can improve performance
-#'                   on high-latency filesystems. Disabled by default.
-#'
-#'   `format = "text"`: see [CsvConvertOptions]. Note that options can only be
-#'   specified with the Arrow C++ library naming. Also, "block_size" from
-#'   [CsvReadOptions] may be given.
-#'
-#' It returns the appropriate subclass of `FragmentScanOptions`
-#' (e.g. `CsvFragmentScanOptions`).
-#' @rdname FragmentScanOptions
-#' @name FragmentScanOptions
-#' @export
-FragmentScanOptions <- R6Class("FragmentScanOptions", inherit = ArrowObject,
-  active = list(
-    # @description
-    # Return the `FragmentScanOptions`'s type
-    type = function() dataset___FragmentScanOptions__type_name(self)
-  )
-)
-FragmentScanOptions$create <- function(format, ...) {
-  opt_names <- names(list(...))
-  if (format %in% c("csv", "text", "tsv")) {
-    CsvFragmentScanOptions$create(...)
-  } else if (format == "parquet") {
-    ParquetFragmentScanOptions$create(...)
-  } else {
-    stop("Unsupported file format: ", format, call. = FALSE)
-  }
-}
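A usage sketch of this factory (values hypothetical):

    FragmentScanOptions$create("parquet", pre_buffer = TRUE)
    FragmentScanOptions$create("csv", null_values = c("", "NA", "-"))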
-
-#' @export
-as.character.FragmentScanOptions <- function(x, ...) {
-  x$type
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FragmentScanOptions
-#' @export
-CsvFragmentScanOptions <- R6Class("CsvFragmentScanOptions", inherit = FragmentScanOptions)
-CsvFragmentScanOptions$create <- function(...,
-                                          convert_opts = csv_file_format_convert_options(...),
-                                          read_opts = csv_file_format_read_options(...)) {
-  dataset___CsvFragmentScanOptions__Make(convert_opts, read_opts)
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FragmentScanOptions
-#' @export
-ParquetFragmentScanOptions <- R6Class("ParquetFragmentScanOptions", inherit = FragmentScanOptions)
-ParquetFragmentScanOptions$create <- function(use_buffered_stream = FALSE,
-                                              buffer_size = 8192,
-                                              pre_buffer = FALSE) {
-  dataset___ParquetFragmentScanOptions__Make(use_buffered_stream, buffer_size, pre_buffer)
-}
-
-#' Format-specific write options
-#'
-#' @description
-#' A `FileWriteOptions` holds write options specific to a `FileFormat`.
-FileWriteOptions <- R6Class("FileWriteOptions", inherit = ArrowObject,
-  public = list(
-    update = function(...) {
-      if (self$type == "parquet") {
-        dataset___ParquetFileWriteOptions__update(self,
-            ParquetWriterProperties$create(...),
-            ParquetArrowWriterProperties$create(...))
-      } else if (self$type == "ipc") {
-        args <- list(...)
-        if (is.null(args$codec)) {
-          dataset___IpcFileWriteOptions__update1(self,
-              get_ipc_use_legacy_format(args$use_legacy_format),
-              get_ipc_metadata_version(args$metadata_version))
-        } else {
-          dataset___IpcFileWriteOptions__update2(self,
-              get_ipc_use_legacy_format(args$use_legacy_format),
-              args$codec,
-              get_ipc_metadata_version(args$metadata_version))
-        }
-      }
-      invisible(self)
-    }
-  ),
-  active = list(
-    type = function() dataset___FileWriteOptions__type_name(self)
-  )
-)
-FileWriteOptions$create <- function(format, ...) {
-  if (!inherits(format, "FileFormat")) {
-    format <- FileFormat$create(format)
-  }
-  options <- dataset___FileFormat__DefaultWriteOptions(format)
-  options$update(...)
-}
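A sketch of how default write options are obtained and updated (the codec choice is an assumption; availability depends on the build):

    opts <- FileWriteOptions$create("ipc", codec = Codec$create("zstd"))
    opts$type  # "ipc"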
diff --git a/r/R/dataset-partition.R b/r/R/dataset-partition.R
deleted file mode 100644
index e40427a..0000000
--- a/r/R/dataset-partition.R
+++ /dev/null
@@ -1,125 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Define Partitioning for a Dataset
-#'
-#' @description
-#' Pass a `Partitioning` object to a [FileSystemDatasetFactory]'s `$create()`
-#' method to indicate how the file's paths should be interpreted to define
-#' partitioning.
-#'
-#' `DirectoryPartitioning` describes how to interpret raw path segments, in
-#' order. For example, `schema(year = int16(), month = int8())` would define
-#' partitions for file paths like "2019/01/file.parquet",
-#' "2019/02/file.parquet", etc. In this scheme `NULL` values will be skipped. In
-#' the previous example: when writing a dataset if the month was `NA` (or
-#' `NULL`), the files would be placed in "2019/file.parquet". When reading, the
-#' rows in "2019/file.parquet" would return an `NA` for the month column. An
-#' error will be raised if an outer directory is `NULL` and an inner directory
-#' is not.
-#'
-#' `HivePartitioning` is for Hive-style partitioning, which embeds field
-#' names and values in path segments, such as
-#' "/year=2019/month=2/data.parquet". Because fields are named in the path
-#' segments, order does not matter. This partitioning scheme allows `NULL`
-#' values. They will be replaced by a configurable `null_fallback` which
-#' defaults to the string `"__HIVE_DEFAULT_PARTITION__"` when writing. When
-#' reading, the `null_fallback` string will be replaced with `NA`s as
-#' appropriate.
-#'
-#' `PartitioningFactory` subclasses instruct the `DatasetFactory` to detect
-#' partition features from the file paths.
-#' @section Factory:
-#' Both `DirectoryPartitioning$create()` and `HivePartitioning$create()`
-#' methods take a [Schema] as a single input argument. The helper
-#' function [`hive_partition(...)`][hive_partition] is shorthand for
-#' `HivePartitioning$create(schema(...))`.
-#'
-#' With `DirectoryPartitioningFactory$create()`, you can provide just the
-#' names of the path segments (in our example, `c("year", "month")`), and
-#' the `DatasetFactory` will infer the data types for those partition variables.
-#' `HivePartitioningFactory$create()` takes no arguments: both variable names
-#' and their types can be inferred from the file paths. `hive_partition()` with
-#' no arguments returns a `HivePartitioningFactory`.
-#' @name Partitioning
-#' @rdname Partitioning
-#' @export
-Partitioning <- R6Class("Partitioning", inherit = ArrowObject)
-#' @usage NULL
-#' @format NULL
-#' @rdname Partitioning
-#' @export
-DirectoryPartitioning <- R6Class("DirectoryPartitioning", inherit = Partitioning)
-DirectoryPartitioning$create <- dataset___DirectoryPartitioning
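For example (mirroring the docs above):

    # Interprets "2019/01/file.parquet" as year = 2019L, month = 1L
    part <- DirectoryPartitioning$create(schema(year = int16(), month = int8()))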
-
-#' @usage NULL
-#' @format NULL
-#' @rdname Partitioning
-#' @export
-HivePartitioning <- R6Class("HivePartitioning", inherit = Partitioning)
-HivePartitioning$create <- function(schm, null_fallback = NULL) {
-  dataset___HivePartitioning(schm, null_fallback = null_fallback_or_default(null_fallback))
-}
-
-#' Construct Hive partitioning
-#'
-#' Hive partitioning embeds field names and values in path segments, such as
-#' "/year=2019/month=2/data.parquet".
-#'
-#' Because fields are named in the path segments, order of fields passed to
-#' `hive_partition()` does not matter.
-#' @param ... named list of [data types][data-type], passed to [schema()]
-#' @param null_fallback character to be used in place of missing values (`NA` or `NULL`)
-#' in partition columns. Default is `"__HIVE_DEFAULT_PARTITION__"`,
-#' which is what Hive uses.
-#' @return A [HivePartitioning][Partitioning], or a `HivePartitioningFactory` if
-#' calling `hive_partition()` with no arguments.
-#' @examples
-#' \dontrun{
-#' hive_partition(year = int16(), month = int8())
-#' }
-#' @export
-hive_partition <- function(..., null_fallback = NULL) {
-  schm <- schema(...)
-  if (length(schm) == 0) {
-    HivePartitioningFactory$create(null_fallback)
-  } else {
-    HivePartitioning$create(schm, null_fallback)
-  }
-}
-
-PartitioningFactory <- R6Class("PartitioningFactory", inherit = ArrowObject)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname Partitioning
-#' @export
-DirectoryPartitioningFactory <- R6Class("DirectoryPartitioningFactory", inherit = PartitioningFactory)
-DirectoryPartitioningFactory$create <- dataset___DirectoryPartitioning__MakeFactory
-
-#' @usage NULL
-#' @format NULL
-#' @rdname Partitioning
-#' @export
-HivePartitioningFactory <- R6Class("HivePartitioningFactory", inherit = PartitioningFactory)
-HivePartitioningFactory$create <- function(null_fallback = NULL) {
-  dataset___HivePartitioning__MakeFactory(null_fallback_or_default(null_fallback))
-}
-
-null_fallback_or_default <- function(null_fallback) {
-  null_fallback %||% "__HIVE_DEFAULT_PARTITION__"
-}
diff --git a/r/R/dataset-scan.R b/r/R/dataset-scan.R
deleted file mode 100644
index 750401e..0000000
--- a/r/R/dataset-scan.R
+++ /dev/null
@@ -1,202 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Scan the contents of a dataset
-#'
-#' @description
-#' A `Scanner` iterates over a [Dataset]'s fragments and returns data
-#' according to given row filtering and column projection. A `ScannerBuilder`
-#' can help create one.
-#'
-#' @section Factory:
-#' `Scanner$create()` wraps the `ScannerBuilder` interface to make a `Scanner`.
-#' It takes the following arguments:
-#'
-#' * `dataset`: A `Dataset` or `arrow_dplyr_query` object, as returned by the
-#'    `dplyr` methods on `Dataset`.
-#' * `projection`: A character vector of column names to select
-#' * `filter`: A `Expression` to filter the scanned rows by, or `TRUE` (default)
-#'    to keep all rows.
-#' * `use_threads`: logical: should scanning use multithreading? Default `TRUE`
-#' * `...`: Additional arguments, currently ignored
-#' @section Methods:
-#' `ScannerBuilder` has the following methods:
-#'
-#' - `$Project(cols)`: Indicate that the scan should only return columns given
-#' by `cols`, a character vector of column names
-#' - `$Filter(expr)`: Filter rows by an [Expression].
-#' - `$UseThreads(threads)`: logical: should the scan use multithreading?
-#' The method's default input is `TRUE`, but you must call the method to enable
-#' multithreading because the scanner default is `FALSE`.
-#' - `$BatchSize(batch_size)`: integer: Maximum row count of scanned record
-#' batches, default is 32K. If scanned record batches are overflowing memory
-#' then this method can be called to reduce their size.
-#' - `$schema`: Active binding, returns the [Schema] of the Dataset
-#' - `$Finish()`: Returns a `Scanner`
-#'
-#' `Scanner` currently has a single method, `$ToTable()`, which evaluates the
-#' query and returns an Arrow [Table].
-#' @rdname Scanner
-#' @name Scanner
-#' @export
-Scanner <- R6Class("Scanner", inherit = ArrowObject,
-  public = list(
-    ToTable = function() dataset___Scanner__ToTable(self),
-    ScanBatches = function() dataset___Scanner__ScanBatches(self)
-  ),
-  active = list(
-    schema = function() dataset___Scanner__schema(self)
-  )
-)
-Scanner$create <- function(dataset,
-                           projection = NULL,
-                           filter = TRUE,
-                           use_threads = option_use_threads(),
-                           batch_size = NULL,
-                           fragment_scan_options = NULL,
-                           ...) {
-  if (inherits(dataset, "arrow_dplyr_query")) {
-    if (inherits(dataset$.data, "ArrowTabular")) {
-      # To handle mutate() on Table/RecordBatch, we need to collect(as_data_frame=FALSE) now
-      dataset <- dplyr::collect(dataset, as_data_frame = FALSE)
-    }
-    return(Scanner$create(
-      dataset$.data,
-      c(dataset$selected_columns, dataset$temp_columns),
-      dataset$filtered_rows,
-      use_threads,
-      batch_size,
-      fragment_scan_options,
-      ...
-    ))
-  }
-  if (inherits(dataset, c("data.frame", "RecordBatch", "Table"))) {
-    dataset <- InMemoryDataset$create(dataset)
-  }
-  assert_is(dataset, "Dataset")
-
-  scanner_builder <- dataset$NewScan()
-  if (use_threads) {
-    scanner_builder$UseThreads()
-  }
-  if (!is.null(projection)) {
-    scanner_builder$Project(projection)
-  }
-  if (!isTRUE(filter)) {
-    scanner_builder$Filter(filter)
-  }
-  if (is_integerish(batch_size)) {
-    scanner_builder$BatchSize(batch_size)
-  }
-  if (!is.null(fragment_scan_options)) {
-    scanner_builder$FragmentScanOptions(fragment_scan_options)
-  }
-  scanner_builder$Finish()
-}
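A hedged usage sketch of the builder flow above (the dataset path and column names are hypothetical):

    ds <- open_dataset("data-dir")
    scan <- Scanner$create(ds, projection = c("year", "value"), batch_size = 100000L)
    tab <- scan$ToTable()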
-
-#' @export
-names.Scanner <- function(x) names(x$schema)
-
-ScanTask <- R6Class("ScanTask", inherit = ArrowObject,
-  public = list(
-    Execute = function() dataset___ScanTask__get_batches(self)
-  )
-)
-
-#' Apply a function to a stream of RecordBatches
-#'
-#' As an alternative to calling `collect()` on a `Dataset` query, you can
-#' use this function to access the stream of `RecordBatch`es in the `Dataset`.
-#' This lets you aggregate on each chunk and pull the intermediate results into
-#' a `data.frame` for further aggregation, even if you couldn't fit the whole
-#' `Dataset` result in memory.
-#'
-#' This is experimental and not recommended for production use.
-#'
-#' @param X A `Dataset` or `arrow_dplyr_query` object, as returned by the
-#' `dplyr` methods on `Dataset`.
-#' @param FUN A function or `purrr`-style lambda expression to apply to each
-#' batch
-#' @param ... Additional arguments passed to `FUN`
-#' @param .data.frame logical: collect the resulting chunks into a single
-#' `data.frame`? Default `TRUE`
-#' @export
-map_batches <- function(X, FUN, ..., .data.frame = TRUE) {
-  if (.data.frame) {
-    # Rebind lapply locally so results are row-bound into a single data.frame
-    lapply <- map_dfr
-  }
-  scanner <- Scanner$create(ensure_group_vars(X))
-  FUN <- as_mapper(FUN)
-  # message("Making ScanTasks")
-  lapply(scanner$ScanBatches(), function(batch) {
-    # message("Processing Batch")
-    # TODO: wrap batch in arrow_dplyr_query with X$selected_columns,
-    # X$temp_columns, and X$group_by_vars
-    # if X is arrow_dplyr_query, if some other arg (.dplyr?) == TRUE
-    FUN(batch, ...)
-  })
-}
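For instance (a sketch; `ds` is a hypothetical Dataset), partial aggregation per batch:

    # Row count per RecordBatch, row-bound into one data.frame
    map_batches(ds, ~ data.frame(rows = nrow(.x)))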
-
-#' @usage NULL
-#' @format NULL
-#' @rdname Scanner
-#' @export
-ScannerBuilder <- R6Class("ScannerBuilder", inherit = ArrowObject,
-  public = list(
-    Project = function(cols) {
-      # cols is either a character vector or a named list of Expressions
-      if (is.character(cols)) {
-        dataset___ScannerBuilder__ProjectNames(self, cols)
-      } else {
-        # If we have expressions, but they all turn out to be field_refs,
-        # we can still call the simple method
-        field_names <- get_field_names(cols)
-        if (all(nzchar(field_names))) {
-          dataset___ScannerBuilder__ProjectNames(self, field_names)
-        } else {
-          # Else, we are projecting/mutating
-          dataset___ScannerBuilder__ProjectExprs(self, cols, names(cols))
-        }
-      }
-      self
-    },
-    Filter = function(expr) {
-      assert_is(expr, "Expression")
-      dataset___ScannerBuilder__Filter(self, expr)
-      self
-    },
-    UseThreads = function(threads = option_use_threads()) {
-      dataset___ScannerBuilder__UseThreads(self, threads)
-      self
-    },
-    BatchSize = function(batch_size) {
-      dataset___ScannerBuilder__BatchSize(self, batch_size)
-      self
-    },
-    FragmentScanOptions = function(options) {
-      dataset___ScannerBuilder__FragmentScanOptions(self, options)
-      self
-    },
-    Finish = function() dataset___ScannerBuilder__Finish(self)
-  ),
-  active = list(
-    schema = function() dataset___ScannerBuilder__schema(self)
-  )
-)
-
-#' @export
-names.ScannerBuilder <- function(x) names(x$schema)
diff --git a/r/R/dataset-write.R b/r/R/dataset-write.R
deleted file mode 100644
index 8c9a1ef..0000000
--- a/r/R/dataset-write.R
+++ /dev/null
@@ -1,99 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Write a dataset
-#'
-#' This function allows you to write a dataset. By writing to more efficient
-#' binary storage formats, and by specifying relevant partitioning, you can
-#' make it much faster to read and query.
-#'
-#' @param dataset [Dataset], [RecordBatch], [Table], `arrow_dplyr_query`, or
-#' `data.frame`. If an `arrow_dplyr_query` or `grouped_df`,
-#' `schema` and `partitioning` will be taken from the result of any `select()`
-#' and `group_by()` operations done on the dataset. `filter()` queries will be
-#' applied to restrict written rows.
-#' Note that `select()`-ed columns may not be renamed.
-#' @param path string path, URI, or `SubTreeFileSystem` referencing a directory
-#' to write to (directory will be created if it does not exist)
-#' @param format a string identifier of the file format. Default is to use
-#' "parquet" (see [FileFormat])
-#' @param partitioning `Partitioning` or a character vector of columns to
-#' use as partition keys (to be written as path segments). Default is to
-#' use the current `group_by()` columns.
-#' @param basename_template string template for the names of files to be written.
-#' Must contain `"{i}"`, which will be replaced with an autoincremented
-#' integer to generate basenames of datafiles. For example, `"part-{i}.feather"`
-#' will yield `"part-0.feather", ...`.
-#' @param hive_style logical: write partition segments as Hive-style
-#' (`key1=value1/key2=value2/file.ext`) or as just bare values. Default is `TRUE`.
-#' @param ... additional format-specific arguments. For available Parquet
-#' options, see [write_parquet()]. The available Feather options are
-#' - `use_legacy_format` logical: write data formatted so that Arrow libraries
-#'   versions 0.14 and lower can read it. Default is `FALSE`. You can also
-#'   enable this by setting the environment variable `ARROW_PRE_0_15_IPC_FORMAT=1`.
-#' - `metadata_version`: A string like "V5" or the equivalent integer indicating
-#'   the Arrow IPC MetadataVersion. Default (NULL) will use the latest version,
-#'   unless the environment variable `ARROW_PRE_1_0_METADATA_VERSION=1`, in
-#'   which case it will be V4.
-#' - `codec`: A [Codec] which will be used to compress body buffers of written
-#'   files. Default (NULL) will not compress body buffers.
-#' - `null_fallback`: character to be used in place of missing values (`NA` or
-#' `NULL`) when using Hive-style partitioning. See [hive_partition()].
-#' @return The input `dataset`, invisibly
-#' @export
-write_dataset <- function(dataset,
-                          path,
-                          format = c("parquet", "feather", "arrow", "ipc"),
-                          partitioning = dplyr::group_vars(dataset),
-                          basename_template = paste0("part-{i}.", as.character(format)),
-                          hive_style = TRUE,
-                          ...) {
-  format <- match.arg(format)
-  if (inherits(dataset, "arrow_dplyr_query")) {
-    if (inherits(dataset$.data, "ArrowTabular")) {
-      # collect() to materialize any mutate/rename
-      dataset <- dplyr::collect(dataset, as_data_frame = FALSE)
-    }
-    # We can select a subset of columns but we can't rename them
-    if (!all(get_field_names(dataset) == names(dataset$selected_columns))) {
-      stop("Renaming columns when writing a dataset is not yet supported", call. = FALSE)
-    }
-    # partitioning vars need to be in the `select` schema
-    dataset <- ensure_group_vars(dataset)
-  } else if (inherits(dataset, "grouped_df")) {
-    force(partitioning)
-    # Drop the grouping metadata before writing; we've already consumed it
-    # now to construct `partitioning` and don't want it in the metadata$r
-    dataset <- dplyr::ungroup(dataset)
-  }
-
-  scanner <- Scanner$create(dataset)
-  if (!inherits(partitioning, "Partitioning")) {
-    partition_schema <- scanner$schema[partitioning]
-    if (isTRUE(hive_style)) {
-      partitioning <- HivePartitioning$create(partition_schema, null_fallback = list(...)$null_fallback)
-    } else {
-      partitioning <- DirectoryPartitioning$create(partition_schema)
-    }
-  }
-
-  path_and_fs <- get_path_and_filesystem(path)
-  options <- FileWriteOptions$create(format, table = scanner, ...)
-
-  dataset___Dataset__Write(options, path_and_fs$fs, path_and_fs$path,
-                           partitioning, basename_template, scanner)
-}
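A usage sketch (output path hypothetical):

    write_dataset(mtcars, "mtcars-part", format = "parquet", partitioning = "cyl")
    # writes Hive-style directories such as mtcars-part/cyl=4/part-0.parquet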
diff --git a/r/R/dataset.R b/r/R/dataset.R
deleted file mode 100644
index 2666339..0000000
--- a/r/R/dataset.R
+++ /dev/null
@@ -1,320 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Open a multi-file dataset
-#'
-#' Arrow Datasets allow you to query against data that has been split across
-#' multiple files. This sharding of data may indicate partitioning, which
-#' can accelerate queries that only touch some partitions (files). Call
-#' `open_dataset()` to point to a directory of data files and return a
-#' `Dataset`, then use `dplyr` methods to query it.
-#'
-#' @param sources One of:
-#'   * a string path or URI to a directory containing data files
-#'   * a string path or URI to a single file
-#'   * a character vector of paths or URIs to individual data files
-#'   * a list of `Dataset` objects as created by this function
-#'   * a list of `DatasetFactory` objects as created by [dataset_factory()].
-#'
-#' When `sources` is a vector of file URIs, they must all use the same protocol
-#' and point to files located in the same file system and having the same
-#' format.
-#' @param schema [Schema] for the `Dataset`. If `NULL` (the default), the schema
-#' will be inferred from the data sources.
-#' @param partitioning When `sources` is a directory path/URI, one of:
-#'   * a `Schema`, in which case the file paths relative to `sources` will be
-#'    parsed, and path segments will be matched with the schema fields. For
-#'    example, `schema(year = int16(), month = int8())` would create partitions
-#'    for file paths like `"2019/01/file.parquet"`, `"2019/02/file.parquet"`,
-#'    etc.
-#'   * a character vector that defines the field names corresponding to those
-#'    path segments (that is, you're providing the names that would correspond
-#'    to a `Schema` but the types will be autodetected)
-#'   * a `HivePartitioning` or `HivePartitioningFactory`, as returned
-#'    by [hive_partition()] which parses explicit or autodetected fields from
-#'    Hive-style path segments
-#'   * `NULL` for no partitioning
-#'
-#' The default is to autodetect Hive-style partitions. When `sources` is not a
-#' directory path/URI, `partitioning` is ignored.
-#' @param unify_schemas logical: should all data fragments (files, `Dataset`s)
-#' be scanned in order to create a unified schema from them? If `FALSE`, only
-#' the first fragment will be inspected for its schema. Use this fast path
-#' when you know and trust that all fragments have an identical schema.
-#' The default is `FALSE` when creating a dataset from a directory path/URI or
-#' vector of file paths/URIs (because there may be many files and scanning may
-#' be slow) but `TRUE` when `sources` is a list of `Dataset`s (because there
-#' should be few `Dataset`s in the list and their `Schema`s are already in
-#' memory).
-#' @param ... additional arguments passed to `dataset_factory()` when `sources`
-#' is a directory path/URI or vector of file paths/URIs, otherwise ignored.
-#' These may include `format` to indicate the file format, or other
-#' format-specific options.
-#' @return A [Dataset] R6 object. Use `dplyr` methods on it to query the data,
-#' or call [`$NewScan()`][Scanner] to construct a query directly.
-#' @export
-#' @seealso `vignette("dataset", package = "arrow")`
-#' @include arrow-package.R
-open_dataset <- function(sources,
-                         schema = NULL,
-                         partitioning = hive_partition(),
-                         unify_schemas = NULL,
-                         ...) {
-  if (is_list_of(sources, "Dataset")) {
-    if (is.null(schema)) {
-      if (is.null(unify_schemas) || isTRUE(unify_schemas)) {
-        # Default is to unify schemas here
-        schema <- unify_schemas(schemas = map(sources, ~.$schema))
-      } else {
-        # Take the first one.
-        schema <- sources[[1]]$schema
-      }
-    }
-    # Enforce that all datasets have the same schema
-    assert_is(schema, "Schema")
-    sources <- lapply(sources, function(x) {
-      x$schema <- schema
-      x
-    })
-    return(dataset___UnionDataset__create(sources, schema))
-  }
-  factory <- DatasetFactory$create(sources, partitioning = partitioning, ...)
-  # Default is _not_ to inspect/unify schemas
-  factory$Finish(schema, isTRUE(unify_schemas))
-}
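And the corresponding entry point in use (directory and column names hypothetical):

    ds <- open_dataset("sales-data", partitioning = c("year", "month"))
    dplyr::collect(dplyr::filter(ds, year == 2019))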
-
-#' Multi-file datasets
-#'
-#' @description
-#' Arrow Datasets allow you to query against data that has been split across
-#' multiple files. This sharding of data may indicate partitioning, which
-#' can accelerate queries that only touch some partitions (files).
-#'
-#' A `Dataset` contains one or more `Fragments`, such as files, of potentially
-#' differing type and partitioning.
-#'
-#' For `Dataset$create()`, see [open_dataset()], which is an alias for it.
-#'
-#' `DatasetFactory` is used to provide finer control over the creation of `Dataset`s.
-#'
-#' @section Factory:
-#' `DatasetFactory` is used to create a `Dataset`, inspect the [Schema] of the
-#' fragments contained in it, and declare a partitioning.
-#' `FileSystemDatasetFactory` is a subclass of `DatasetFactory` for
-#' discovering files in the local file system, the only currently supported
-#' file system.
-#'
-#' For the `DatasetFactory$create()` factory method, see [dataset_factory()], an
-#' alias for it. A `DatasetFactory` has:
-#'
-#' - `$Inspect(unify_schemas)`: If `unify_schemas` is `TRUE`, all fragments
-#' will be scanned and a unified [Schema] will be created from them; if `FALSE`
-#' (default), only the first fragment will be inspected for its schema. Use this
-#' fast path when you know and trust that all fragments have an identical schema.
-#' - `$Finish(schema, unify_schemas)`: Returns a `Dataset`. If `schema` is provided,
-#' it will be used for the `Dataset`; if omitted, a `Schema` will be created from
-#' inspecting the fragments (files) in the dataset, following `unify_schemas`
-#' as described above.
-#'
-#' `FileSystemDatasetFactory$create()` is a lower-level factory method and
-#' takes the following arguments:
-#' * `filesystem`: A [FileSystem]
-#' * `selector`: Either a [FileSelector] or `NULL`
-#' * `paths`: Either a character vector of file paths or `NULL`
-#' * `format`: A [FileFormat]
-#' * `partitioning`: Either `Partitioning`, `PartitioningFactory`, or `NULL`
-#' @section Methods:
-#'
-#' A `Dataset` has the following methods:
-#' - `$NewScan()`: Returns a [ScannerBuilder] for building a query
-#' - `$schema`: Active binding that returns the [Schema] of the Dataset; you
-#'   may also replace the dataset's schema by using `ds$schema <- new_schema`.
-#'   This method currently supports only adding, removing, or reordering
-#'   fields in the schema: you cannot alter or cast the field types.
-#'
-#' `FileSystemDataset` has the following methods:
-#' - `$files`: Active binding, returns the files of the `FileSystemDataset`
-#' - `$format`: Active binding, returns the [FileFormat] of the `FileSystemDataset`
-#'
-#' `UnionDataset` has the following methods:
-#' - `$children`: Active binding, returns all child `Dataset`s.
-#'
-#' @export
-#' @seealso [open_dataset()] for a simple interface to creating a `Dataset`
-Dataset <- R6Class("Dataset", inherit = ArrowObject,
-  public = list(
-    # @description
-    # Start a new scan of the data
-    # @return A [ScannerBuilder]
-    NewScan = function() dataset___Dataset__NewScan(self),
-    ToString = function() self$schema$ToString()
-  ),
-  active = list(
-    schema = function(schema) {
-      if (missing(schema)) {
-        dataset___Dataset__schema(self)
-      } else {
-        assert_is(schema, "Schema")
-        invisible(dataset___Dataset__ReplaceSchema(self, schema))
-      }
-    },
-    metadata = function() self$schema$metadata,
-    num_rows = function() {
-      warning("Number of rows unknown; returning NA", call. = FALSE)
-      NA_integer_
-    },
-    num_cols = function() length(self$schema),
-    # @description
-    # Return the Dataset's type.
-    type = function() dataset___Dataset__type_name(self)
-  )
-)
-Dataset$create <- open_dataset
-
-#' @name FileSystemDataset
-#' @rdname Dataset
-#' @export
-FileSystemDataset <- R6Class("FileSystemDataset", inherit = Dataset,
-  public = list(
-    .class_title = function() {
-      nfiles <- length(self$files)
-      file_type <- self$format$type
-      pretty_file_type <- list(
-        parquet = "Parquet",
-        ipc = "Feather"
-      )[[file_type]]
-
-      paste(
-        class(self)[[1]],
-        "with",
-        nfiles,
-        pretty_file_type %||% file_type,
-        ifelse(nfiles == 1, "file", "files")
-      )
-    }
-  ),
-  active = list(
-    # @description
-    # Return the files contained in this `FileSystemDataset`
-    files = function() dataset___FileSystemDataset__files(self),
-    # @description
-    # Return the format of files in this `Dataset`
-    format = function() {
-      dataset___FileSystemDataset__format(self)
-    },
-    # @description
-    # Return the filesystem of files in this `Dataset`
-    filesystem = function() {
-      dataset___FileSystemDataset__filesystem(self)
-    },
-    num_rows = function() {
-      if (inherits(self$format, "ParquetFileFormat")) {
-        # It's generally fast enough to skim the files directly
-        sum(map_int(self$files, ~ParquetFileReader$create(.x)$num_rows))
-      } else {
-        # TODO: implement for other file formats
-        warning("Number of rows unknown; returning NA", call. = FALSE)
-        NA_integer_
-        # Could do a scan, picking only the last column, which hopefully is virtual
-        # But this can be slow
-        # Scanner$create(self, projection = tail(names(self), 1))$ToTable()$num_rows
-        # See also https://issues.apache.org/jira/browse/ARROW-9697
-      }
-    }
-  )
-)
-
-#' @name UnionDataset
-#' @rdname Dataset
-#' @export
-UnionDataset <- R6Class("UnionDataset", inherit = Dataset,
-  active = list(
-    # @description
-    # Return the UnionDataset's child `Dataset`s
-    children = function() {
-      dataset___UnionDataset__children(self)
-    }
-  )
-)
-
-#' @name InMemoryDataset
-#' @rdname Dataset
-#' @export
-InMemoryDataset <- R6Class("InMemoryDataset", inherit = Dataset)
-InMemoryDataset$create <- function(x) {
-  if (!inherits(x, "Table")) {
-    x <- Table$create(x)
-  }
-  dataset___InMemoryDataset__create(x)
-}
-
-
-#' @export
-names.Dataset <- function(x) names(x$schema)
-
-#' @export
-dim.Dataset <- function(x) c(x$num_rows, x$num_cols)
-
-#' @export
-c.Dataset <- function(...) Dataset$create(list(...))
-
-#' @export
-head.Dataset <- function(x, n = 6L, ...) {
-  assert_that(n > 0) # For now
-  scanner <- Scanner$create(ensure_group_vars(x))
-  dataset___Scanner__head(scanner, n)
-}
-
-#' @export
-tail.Dataset <- function(x, n = 6L, ...) {
-  assert_that(n > 0) # For now
-  result <- list()
-  batch_num <- 0
-  scanner <- Scanner$create(ensure_group_vars(x))
-  for (batch in rev(dataset___Scanner__ScanBatches(scanner))) {
-    batch_num <- batch_num + 1
-    result[[batch_num]] <- tail(batch, n)
-    n <- n - nrow(batch)
-    if (n <= 0) break
-  }
-  Table$create(!!!rev(result))
-}
-
-#' @export
-`[.Dataset` <- function(x, i, j, ..., drop = FALSE) {
-  if (nargs() == 2L) {
-    # List-like column extraction (x[i])
-    return(x[, i])
-  }
-  if (!missing(j)) {
-    x <- select.Dataset(x, j)
-  }
-
-  if (!missing(i)) {
-    x <- take_dataset_rows(x, i)
-  }
-  x
-}
-
-take_dataset_rows <- function(x, i) {
-  if (!is.numeric(i) || any(i < 0)) {
-    stop("Only slicing with positive indices is supported", call. = FALSE)
-  }
-  scanner <- Scanner$create(ensure_group_vars(x))
-  i <- Array$create(i - 1)
-  dataset___Scanner__TakeRows(scanner, i)
-}
diff --git a/r/R/deprecated.R b/r/R/deprecated.R
deleted file mode 100644
index e8848c4..0000000
--- a/r/R/deprecated.R
+++ /dev/null
@@ -1,40 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @rdname read_ipc_stream
-#' @export
-read_arrow <- function(file, ...) {
-  .Deprecated(msg = "Use 'read_ipc_stream' or 'read_feather' instead.")
-  if (inherits(file, "raw")) {
-    read_ipc_stream(file, ...)
-  } else {
-    read_feather(file, ...)
-  }
-}
-
-#' @rdname write_ipc_stream
-#' @export
-write_arrow <- function(x, sink, ...) {
-  .Deprecated(msg = "Use 'write_ipc_stream' or 'write_feather' instead.")
-  if (inherits(sink, "raw")) {
-    # HACK for sparklyr
-    # Note that this returns a new R raw vector, not the one passed as `sink`
-    write_to_raw(x)
-  } else {
-    write_feather(x, sink, ...)
-  }
-}
diff --git a/r/R/dictionary.R b/r/R/dictionary.R
deleted file mode 100644
index b701768..0000000
--- a/r/R/dictionary.R
+++ /dev/null
@@ -1,69 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include type.R
-
-#' @title class DictionaryType
-#'
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @section Methods:
-#'
-#' TODO
-#'
-#' @rdname DictionaryType
-#' @name DictionaryType
-DictionaryType <- R6Class("DictionaryType",
-  inherit = FixedWidthType,
-  public = list(
-    ToString = function() {
-      prettier_dictionary_type(DataType__ToString(self))
-    }
-  ),
-  active = list(
-    index_type = function() DictionaryType__index_type(self),
-    value_type = function() DictionaryType__value_type(self),
-    name = function() DictionaryType__name(self),
-    ordered = function() DictionaryType__ordered(self)
-  )
-)
-DictionaryType$create <- function(index_type = int32(),
-                                  value_type = utf8(),
-                                  ordered = FALSE) {
-  assert_is(index_type, "DataType")
-  assert_is(value_type, "DataType")
-  DictionaryType__initialize(index_type, value_type, ordered)
-}
-
-#' Create a dictionary type
-#'
-#' @param index_type A DataType for the indices (default [int32()])
-#' @param value_type A DataType for the values (default [utf8()])
-#' @param ordered Is this an ordered dictionary (default `FALSE`)?
-#'
-#' @return A [DictionaryType]
-#' @seealso [Other Arrow data types][data-type]
-#' @export
-dictionary <- DictionaryType$create
-
-prettier_dictionary_type <- function(x) {
-  # Prettier format the "ordered" attribute
-  x <- sub(", ordered=0", "", x)
-  sub("ordered=1", "ordered", x)
-}
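-
-# Usage sketch (illustrative, not part of the original file):
-#   dictionary(int8(), utf8(), ordered = TRUE)
-#   # prints as: dictionary<values=string, indices=int8, ordered>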
diff --git a/r/R/dplyr.R b/r/R/dplyr.R
deleted file mode 100644
index 845cb3a..0000000
--- a/r/R/dplyr.R
+++ /dev/null
@@ -1,1101 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include expression.R
-#' @include record-batch.R
-#' @include table.R
-
-arrow_dplyr_query <- function(.data) {
-  # An arrow_dplyr_query is a container for an Arrow data object (Table,
-  # RecordBatch, or Dataset) and the state of the user's dplyr query--things
-  # like selected columns, filters, and group vars.
-
-  # For most dplyr methods,
-  # method.Table == method.RecordBatch == method.Dataset == method.arrow_dplyr_query
-  # This works because the functions all pass .data through arrow_dplyr_query()
-  if (inherits(.data, "arrow_dplyr_query")) {
-    return(.data)
-  }
-  structure(
-    list(
-      .data = .data$clone(),
-      # selected_columns is a named list:
-      # * contents are references/expressions pointing to the data
-      # * names are the names they should be in the end (i.e. this
-      #   records any renaming)
-      selected_columns = make_field_refs(names(.data), dataset = inherits(.data, "Dataset")),
-      # filtered_rows will be an Expression
-      filtered_rows = TRUE,
-      # group_by_vars is a character vector of columns (as renamed)
-      # in the data. They will be kept when data is pulled into R.
-      group_by_vars = character(),
-      # drop_empty_groups is a logical value indicating whether to drop
-      # groups formed by factor levels that don't appear in the data. It
-      # should be non-null only when the data is grouped.
-      drop_empty_groups = NULL,
-      # arrange_vars will be a list of expressions named by their associated
-      # column names
-      arrange_vars = list(),
-      # arrange_desc will be a logical vector indicating the sort order for each
-      # expression in arrange_vars (FALSE for ascending, TRUE for descending)
-      arrange_desc = logical()
-    ),
-    class = "arrow_dplyr_query"
-  )
-}
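-
-# Usage sketch (illustrative; hypothetical data): the constructor only
-# records query state, it never touches the wrapped data.
-#   q <- arrow_dplyr_query(Table$create(data.frame(x = 1:3, y = letters[1:3])))
-#   names(q$selected_columns)  # "x" "y"
-#   q$filtered_rows            # TRUE, i.e. no filter applied yet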
-
-#' @export
-print.arrow_dplyr_query <- function(x, ...) {
-  schm <- x$.data$schema
-  cols <- get_field_names(x)
-  # If cols are expressions, they won't be in the schema and will be "" in cols
-  fields <- map_chr(cols, function(name) {
-    if (nzchar(name)) {
-      schm$GetFieldByName(name)$ToString()
-    } else {
-      "expr"
-    }
-  })
-  # Strip off the field names as they are in the dataset and add the renamed ones
-  fields <- paste(names(cols), sub("^.*?: ", "", fields), sep = ": ", collapse = "\n")
-  cat(class(x$.data)[1], " (query)\n", sep = "")
-  cat(fields, "\n", sep = "")
-  cat("\n")
-  if (!isTRUE(x$filtered_rows)) {
-    if (query_on_dataset(x)) {
-      filter_string <- x$filtered_rows$ToString()
-    } else {
-      filter_string <- .format_array_expression(x$filtered_rows)
-    }
-    cat("* Filter: ", filter_string, "\n", sep = "")
-  }
-  if (length(x$group_by_vars)) {
-    cat("* Grouped by ", paste(x$group_by_vars, collapse = ", "), "\n", sep = "")
-  }
-  if (length(x$arrange_vars)) {
-    if (query_on_dataset(x)) {
-      arrange_strings <- map_chr(x$arrange_vars, function(x) x$ToString())
-    } else {
-      arrange_strings <- map_chr(x$arrange_vars, .format_array_expression)
-    }
-    cat(
-      "* Sorted by ",
-      paste(
-        paste0(
-          arrange_strings,
-          " [", ifelse(x$arrange_desc, "desc", "asc"), "]"
-        ),
-        collapse = ", "
-      ),
-      "\n",
-      sep = ""
-    )
-  }
-  cat("See $.data for the source Arrow object\n")
-  invisible(x)
-}
-
-get_field_names <- function(selected_cols) {
-  if (inherits(selected_cols, "arrow_dplyr_query")) {
-    selected_cols <- selected_cols$selected_columns
-  }
-  map_chr(selected_cols, function(x) {
-    if (inherits(x, "Expression")) {
-      out <- x$field_name
-    } else if (inherits(x, "array_expression")) {
-      out <- x$args$field_name
-    } else {
-      out <- NULL
-    }
-    # If x isn't some kind of field reference, out is NULL,
-    # but we always need to return a string
-    out %||% ""
-  })
-}
-
-make_field_refs <- function(field_names, dataset = TRUE) {
-  if (dataset) {
-    out <- lapply(field_names, Expression$field_ref)
-  } else {
-    out <- lapply(field_names, function(x) array_expression("array_ref", field_name = x))
-  }
-  set_names(out, field_names)
-}
-
-# These are the names reflecting all select/rename, not what is in Arrow
-#' @export
-names.arrow_dplyr_query <- function(x) names(x$selected_columns)
-
-#' @export
-dim.arrow_dplyr_query <- function(x) {
-  cols <- length(names(x))
-
-  if (isTRUE(x$filtered_rows)) {
-    rows <- x$.data$num_rows
-  } else if (query_on_dataset(x)) {
-    warning("Number of rows unknown; returning NA", call. = FALSE)
-    # TODO: https://issues.apache.org/jira/browse/ARROW-9697
-    rows <- NA_integer_
-  } else {
-    # Evaluate the filter expression to a BooleanArray and count
-    rows <- as.integer(sum(eval_array_expression(x$filtered_rows, x$.data), na.rm = TRUE))
-  }
-  c(rows, cols)
-}
-
-#' @export
-as.data.frame.arrow_dplyr_query <- function(x, row.names = NULL, optional = FALSE, ...) {
-  collect.arrow_dplyr_query(x, as_data_frame = TRUE, ...)
-}
-
-#' @export
-head.arrow_dplyr_query <- function(x, n = 6L, ...) {
-  if (query_on_dataset(x)) {
-    head.Dataset(x, n, ...)
-  } else {
-    out <- collect.arrow_dplyr_query(x, as_data_frame = FALSE)
-    if (inherits(out, "arrow_dplyr_query")) {
-      out$.data <- head(out$.data, n)
-    } else {
-      out <- head(out, n)
-    }
-    out
-  }
-}
-
-#' @export
-tail.arrow_dplyr_query <- function(x, n = 6L, ...) {
-  if (query_on_dataset(x)) {
-    tail.Dataset(x, n, ...)
-  } else {
-    out <- collect.arrow_dplyr_query(x, as_data_frame = FALSE)
-    if (inherits(out, "arrow_dplyr_query")) {
-      out$.data <- tail(out$.data, n)
-    } else {
-      out <- tail(out, n)
-    }
-    out
-  }
-}
-
-#' @export
-`[.arrow_dplyr_query` <- function(x, i, j, ..., drop = FALSE) {
-  if (query_on_dataset(x)) {
-    `[.Dataset`(x, i, j, ..., drop = FALSE)
-  } else {
-    stop(
-      "[ method not implemented for queries. Call 'collect(x, as_data_frame = FALSE)' first",
-      call. = FALSE
-    )
-  }
-}
-
-# The following S3 methods are registered on load if dplyr is present
-tbl_vars.arrow_dplyr_query <- function(x) names(x$selected_columns)
-
-select.arrow_dplyr_query <- function(.data, ...) {
-  check_select_helpers(enexprs(...))
-  column_select(arrow_dplyr_query(.data), !!!enquos(...))
-}
-select.Dataset <- select.ArrowTabular <- select.arrow_dplyr_query
-
-rename.arrow_dplyr_query <- function(.data, ...) {
-  check_select_helpers(enexprs(...))
-  column_select(arrow_dplyr_query(.data), !!!enquos(...), .FUN = vars_rename)
-}
-rename.Dataset <- rename.ArrowTabular <- rename.arrow_dplyr_query
-
-column_select <- function(.data, ..., .FUN = vars_select) {
-  # .FUN is either tidyselect::vars_select or tidyselect::vars_rename
-  # It operates on the names() of selected_columns, i.e. the column names
-  # factoring in any renaming that may already have happened
-  out <- .FUN(names(.data), !!!enquos(...))
-  # Make sure that the resulting selected columns map back to the original data,
-  # as in when there are multiple renaming steps
-  .data$selected_columns <- set_names(.data$selected_columns[out], names(out))
-
-  # If we've renamed columns, we need to project that renaming into other
-  # query parameters we've collected
-  renamed <- out[names(out) != out]
-  if (length(renamed)) {
-    # Massage group_by
-    gbv <- .data$group_by_vars
-    renamed_groups <- gbv %in% renamed
-    gbv[renamed_groups] <- names(renamed)[match(gbv[renamed_groups], renamed)]
-    .data$group_by_vars <- gbv
-    # No need to massage filters because those contain references to Arrow objects
-  }
-  .data
-}
-
-relocate.arrow_dplyr_query <- function(.data, ..., .before = NULL, .after = NULL) {
-  # The code in this function is adapted from the code in dplyr::relocate.data.frame
-  # at https://github.com/tidyverse/dplyr/blob/master/R/relocate.R
-  # TODO: revisit this after https://github.com/tidyverse/dplyr/issues/5829
-  check_select_helpers(c(enexprs(...), enexpr(.before), enexpr(.after)))
-
-  .data <- arrow_dplyr_query(.data)
-
-  to_move <- eval_select(expr(c(...)), .data$selected_columns)
-
-  .before <- enquo(.before)
-  .after <- enquo(.after)
-  has_before <- !quo_is_null(.before)
-  has_after <- !quo_is_null(.after)
-
-  if (has_before && has_after) {
-    abort("Must supply only one of `.before` and `.after`.")
-  } else if (has_before) {
-    where <- min(unname(eval_select(.before, .data$selected_columns)))
-    if (!where %in% to_move) {
-      to_move <- c(to_move, where)
-    }
-  } else if (has_after) {
-    where <- max(unname(eval_select(.after, .data$selected_columns)))
-    if (!where %in% to_move) {
-      to_move <- c(where, to_move)
-    }
-  } else {
-    where <- 1L
-    if (!where %in% to_move) {
-      to_move <- c(to_move, where)
-    }
-  }
-
-  lhs <- setdiff(seq2(1, where - 1), to_move)
-  rhs <- setdiff(seq2(where + 1, length(.data$selected_columns)), to_move)
-
-  pos <- vec_unique(c(lhs, to_move, rhs))
-  new_names <- names(pos)
-  .data$selected_columns <- .data$selected_columns[pos]
-
-  if (!is.null(new_names)) {
-    names(.data$selected_columns)[new_names != ""] <- new_names[new_names != ""]
-  }
-  .data
-}
-relocate.Dataset <- relocate.ArrowTabular <- relocate.arrow_dplyr_query
-
-check_select_helpers <- function(exprs) {
-  # Throw an error if unsupported tidyselect selection helpers are used in `exprs`
-  exprs <- lapply(exprs, function(x) if (is_quosure(x)) quo_get_expr(x) else x)
-  unsup_select_helpers <- "where"
-  funs_in_exprs <- unlist(lapply(exprs, all_funs))
-  unsup_funs <- funs_in_exprs[funs_in_exprs %in% unsup_select_helpers]
-  if (length(unsup_funs)) {
-    stop(
-      "Unsupported selection ",
-      ngettext(length(unsup_funs), "helper: ", "helpers: "),
-      oxford_paste(paste0(unsup_funs, "()"), quote = FALSE),
-      call. = FALSE
-    )
-  }
-}
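-
-# For example (illustrative): select(tab, where(is.numeric)) on an Arrow
-# object stops with 'Unsupported selection helper: where()'.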
-
-filter.arrow_dplyr_query <- function(.data, ..., .preserve = FALSE) {
-  # TODO something with the .preserve argument
-  filts <- quos(...)
-  if (length(filts) == 0) {
-    # Nothing to do
-    return(.data)
-  }
-
-  .data <- arrow_dplyr_query(.data)
-  # tidy-eval the filter expressions inside an Arrow data_mask
-  filters <- lapply(filts, arrow_eval, arrow_mask(.data))
-  bad_filters <- map_lgl(filters, ~inherits(., "try-error"))
-  if (any(bad_filters)) {
-    bads <- oxford_paste(map_chr(filts, as_label)[bad_filters], quote = FALSE)
-    if (query_on_dataset(.data)) {
-      # Abort. We don't want to auto-collect if this is a Dataset because
-      # that could pull far too much data into R.
-      stop(
-        "Filter expression not supported for Arrow Datasets: ", bads,
-        "\nCall collect() first to pull data into R.",
-        call. = FALSE
-      )
-    } else {
-      # TODO: only show this in some debug mode?
-      warning(
-        "Filter expression not implemented in Arrow: ", bads, "; pulling data into R",
-        immediate. = TRUE,
-        call. = FALSE
-      )
-      # Set any valid filters first, then collect and then apply the invalid ones in R
-      .data <- set_filters(.data, filters[!bad_filters])
-      return(dplyr::filter(dplyr::collect(.data), !!!filts[bad_filters]))
-    }
-  }
-
-  set_filters(.data, filters)
-}
-filter.Dataset <- filter.ArrowTabular <- filter.arrow_dplyr_query
-
-arrow_eval <- function (expr, mask) {
-  # filter(), mutate(), etc. work by evaluating the quoted `exprs` to generate Expressions
-  # with references to Arrays (if .data is Table/RecordBatch) or Fields (if
-  # .data is a Dataset).
-
-  # This yields an Expression as long as the `exprs` are implemented in Arrow.
-  # Otherwise, it returns a try-error
-  tryCatch(eval_tidy(expr, mask), error = function(e) {
-    # Look for the cases where bad input was given, i.e. this would fail
-    # in regular dplyr anyway, and let those raise those as errors;
-    # else, for things not supported by Arrow return a "try-error",
-    # which we'll handle differently
-    msg <- conditionMessage(e)
-    patterns <- dplyr_functions$i18ized_error_pattern
-    if (is.null(patterns)) {
-      patterns <- i18ize_error_messages()
-      # Memoize it
-      dplyr_functions$i18ized_error_pattern <- patterns
-    }
-    if (grepl(patterns, msg)) {
-      stop(e)
-    }
-    invisible(structure(msg, class = "try-error", condition = e))
-  })
-}
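-
-# For example (illustrative): arrow_eval(quo(pnorm(x)), mask) returns a
-# "try-error" (pnorm() is found in the calling environment but fails on an
-# Arrow expression), so filter()/mutate() can fall back to R; a reference
-# to a missing column instead matches the i18n-ized "object not found"
-# pattern below and re-raises as a real error.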
-
-i18ize_error_messages <- function() {
-  # Figure out what the error messages will be with this LANGUAGE
-  # so that we can look for them
-  out <- list(
-    obj = tryCatch(eval(parse(text = "X_____X")), error = function(e) conditionMessage(e)),
-    fun = tryCatch(eval(parse(text = "X_____X()")), error = function(e) conditionMessage(e))
-  )
-  paste(map(out, ~sub("X_____X", ".*", .)), collapse = "|")
-}
-
-# Helper to assemble the functions that go in the NSE data mask
-# The only difference between the Dataset and the Table/RecordBatch versions
-# is that they use a different wrapping function (FUN) to hold the unevaluated
-# expression.
-build_function_list <- function(FUN) {
-  wrapper <- function(operator) {
-    force(operator)
-    function(...) FUN(operator, ...)
-  }
-  all_arrow_funs <- list_compute_functions()
-
-  c(
-    # Include mappings from R function name spellings
-    lapply(set_names(names(.array_function_map)), wrapper),
-    # Plus some special handling where it's not 1:1
-    cast = function(x, target_type, safe = TRUE, ...) {
-      opts <- cast_options(safe, ...)
-      opts$to_type <- as_type(target_type)
-      FUN("cast", x, options = opts)
-    },
-    dictionary_encode = function(x, null_encoding_behavior = c("mask", "encode")) {
-      null_encoding_behavior <-
-        NullEncodingBehavior[[toupper(match.arg(null_encoding_behavior))]]
-      FUN(
-        "dictionary_encode",
-        x,
-        options = list(null_encoding_behavior = null_encoding_behavior)
-      )
-    },
-    # as.factor() is mapped in expression.R
-    as.character = function(x) {
-      FUN("cast", x, options = cast_options(to_type = string()))
-    },
-    as.double = function(x) {
-      FUN("cast", x, options = cast_options(to_type = float64()))
-    },
-    as.integer = function(x) {
-      FUN(
-        "cast",
-        x,
-        options = cast_options(
-          to_type = int32(),
-          allow_float_truncate = TRUE,
-          allow_decimal_truncate = TRUE
-        )
-      )
-    },
-    as.integer64 = function(x) {
-      FUN(
-        "cast",
-        x,
-        options = cast_options(
-          to_type = int64(),
-          allow_float_truncate = TRUE,
-          allow_decimal_truncate = TRUE
-        )
-      )
-    },
-    as.logical = function(x) {
-      FUN("cast", x, options = cast_options(to_type = boolean()))
-    },
-    as.numeric = function(x) {
-      FUN("cast", x, options = cast_options(to_type = float64()))
-    },
-    nchar = function(x, type = "chars", allowNA = FALSE, keepNA = NA) {
-      if (allowNA) {
-        stop("allowNA = TRUE not supported for Arrow", call. = FALSE)
-      }
-      if (is.na(keepNA)) {
-        keepNA <- !identical(type, "width")
-      }
-      if (!keepNA) {
-        # TODO: I think there is a fill_null kernel we could use, set null to 2
-        stop("keepNA = TRUE not supported for Arrow", call. = FALSE)
-      }
-      if (identical(type, "bytes")) {
-        FUN("binary_length", x)
-      } else {
-        FUN("utf8_length", x)
-      }
-    },
-    str_trim = function(string, side = c("both", "left", "right")) {
-      side <- match.arg(side)
-      switch(
-        side,
-        left = FUN("utf8_ltrim_whitespace", string),
-        right = FUN("utf8_rtrim_whitespace", string),
-        both = FUN("utf8_trim_whitespace", string)
-      )
-    },
-    grepl = arrow_r_string_match_function(FUN),
-    str_detect = arrow_stringr_string_match_function(FUN),
-    sub = arrow_r_string_replace_function(FUN, 1L),
-    gsub = arrow_r_string_replace_function(FUN, -1L),
-    str_replace = arrow_stringr_string_replace_function(FUN, 1L),
-    str_replace_all = arrow_stringr_string_replace_function(FUN, -1L),
-    between = function(x, left, right) {
-      x >= left & x <= right
-    },
-    # Now also include all available Arrow Compute functions,
-    # namespaced as arrow_fun
-    set_names(
-      lapply(all_arrow_funs, wrapper),
-      paste0("arrow_", all_arrow_funs)
-    )
-  )
-}
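-
-# Minimal sketch of the closure mechanism above (illustrative; list()
-# stands in for FUN):
-#   demo_wrapper <- function(operator) {
-#     force(operator)
-#     function(...) list(fun = operator, args = list(...))
-#   }
-#   demo_wrapper("utf8_upper")("a")
-#   # -> list(fun = "utf8_upper", args = list("a")), which is how the mask
-#   # maps e.g. toupper(x) to FUN("utf8_upper", x)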
-
-arrow_r_string_match_function <- function(FUN) {
-  function(pattern, x, ignore.case = FALSE, fixed = FALSE) {
-    FUN(
-      ifelse(fixed && !ignore.case, "match_substring", "match_substring_regex"),
-      x,
-      options = list(pattern = format_string_pattern(pattern, ignore.case, fixed))
-    )
-  }
-}
-
-arrow_stringr_string_match_function <- function(FUN) {
-  function(string, pattern, negate = FALSE) {
-    opts <- get_stringr_pattern_options(enexpr(pattern))
-    out <- arrow_r_string_match_function(FUN)(
-      pattern = opts$pattern,
-      x = string,
-      ignore.case = opts$ignore_case,
-      fixed = opts$fixed
-    )
-    if (negate) out <- FUN("invert", out)
-    out
-  }
-}
-
-arrow_r_string_replace_function <- function(FUN, max_replacements) {
-  function(pattern, replacement, x, ignore.case = FALSE, fixed = FALSE) {
-    FUN(
-      ifelse(fixed && !ignore.case, "replace_substring", "replace_substring_regex"),
-      x,
-      options = list(
-        pattern = format_string_pattern(pattern, ignore.case, fixed),
-        replacement = format_string_replacement(replacement, ignore.case, fixed),
-        max_replacements = max_replacements
-      )
-    )
-  }
-}
-
-arrow_stringr_string_replace_function <- function(FUN, max_replacements) {
-  function(string, pattern, replacement) {
-    opts <- get_stringr_pattern_options(enexpr(pattern))
-    arrow_r_string_replace_function(FUN, max_replacements)(
-      pattern = opts$pattern,
-      replacement = replacement,
-      x = string,
-      ignore.case = opts$ignore_case,
-      fixed = opts$fixed
-    )
-  }
-}
-
-# format `pattern` as needed for case insensitivity and literal matching by RE2
-format_string_pattern <- function(pattern, ignore.case, fixed) {
-  # Arrow lacks native support for case-insensitive literal string matching and
-  # replacement, so we use the regular expression engine (RE2) to do this.
-  # https://github.com/google/re2/wiki/Syntax
-  if (ignore.case) {
-    if (fixed) {
-      # Everything between "\Q" and "\E" is treated as literal text.
-      # If the search text contains any literal "\E" strings, make them
-      # lowercase so they won't signal the end of the literal text:
-      pattern <- gsub("\\E", "\\e", pattern, fixed = TRUE)
-      pattern <- paste0("\\Q", pattern, "\\E")
-    }
-    # Prepend "(?i)" for case-insensitive matching
-    pattern <- paste0("(?i)", pattern)
-  }
-  pattern
-}
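-
-# For example (illustrative):
-#   format_string_pattern("a.b", ignore.case = TRUE, fixed = TRUE)
-#   # -> "(?i)\\Qa.b\\E" as an R literal: RE2 then matches the characters
-#   # a.b literally, ignoring case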
-
-# format `replacement` as needed for literal replacement by RE2
-format_string_replacement <- function(replacement, ignore.case, fixed) {
-  # Arrow lacks native support for case-insensitive literal string
-  # replacement, so we use the regular expression engine (RE2) to do this.
-  # https://github.com/google/re2/wiki/Syntax
-  if (ignore.case && fixed) {
-    # Escape single backslashes in the regex replacement text so they are
-    # interpreted as literal backslashes:
-    replacement <- gsub("\\", "\\\\", replacement, fixed = TRUE)
-  }
-  replacement
-}
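-
-# For example (illustrative): with ignore.case and fixed both TRUE, the
-# replacement "\\1" (backslash + 1) becomes "\\\\1", so RE2 writes the
-# characters \1 verbatim instead of substituting capture group 1.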
-
-# This function defines the stringr pattern modifier functions (fixed(),
-# regex(), etc.) locally, then uses them to evaluate the quoted expression
-# `pattern`, extracting the pattern string and its options
-get_stringr_pattern_options <- function(pattern) {
-  fixed <- function(pattern, ignore_case = FALSE, ...) {
-    check_dots(...)
-    list(pattern = pattern, fixed = TRUE, ignore_case = ignore_case)
-  }
-  regex <- function(pattern, ignore_case = FALSE, ...) {
-    check_dots(...)
-    list(pattern = pattern, fixed = FALSE, ignore_case = ignore_case)
-  }
-  coll <- boundary <- function(...) {
-    stop(
-      "Pattern modifier `",
-      match.call()[[1]],
-      "()` is not supported in Arrow",
-      call. = FALSE
-    )
-  }
-  check_dots <- function(...) {
-    dots <- list(...)
-    if (length(dots)) {
-      warning(
-        "Ignoring pattern modifier ",
-        ngettext(length(dots), "argument ", "arguments "),
-        "not supported in Arrow: ",
-        oxford_paste(names(dots)),
-        call. = FALSE
-      )
-    }
-  }
-  ensure_opts <- function(opts) {
-    if (is.character(opts)) {
-      # bare string patterns are regular expressions by default, as in stringr
-      opts <- list(pattern = opts, fixed = FALSE, ignore_case = FALSE)
-    }
-    opts
-  }
-  ensure_opts(eval(pattern))
-}
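-
-# For example (illustrative):
-#   get_stringr_pattern_options(quote(fixed("a.b", ignore_case = TRUE)))
-#   # -> list(pattern = "a.b", fixed = TRUE, ignore_case = TRUE)
-# A bare string pattern falls through ensure_opts() as a case-sensitive
-# regular expression, matching stringr's default interpretation.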
-
-# We'll populate these at package load time.
-dplyr_functions <- NULL
-init_env <- function () {
-  dplyr_functions <<- new.env(hash = TRUE)
-}
-init_env()
-
-# Create a data mask for evaluating a dplyr expression
-arrow_mask <- function(.data) {
-  if (query_on_dataset(.data)) {
-    f_env <- new_environment(dplyr_functions$dataset)
-  } else {
-    f_env <- new_environment(dplyr_functions$array)
-  }
-
-  # Add functions that need to error hard and clear.
-  # Some R functions will still try to evaluate on an Expression
-  # and return NA with a warning
-  fail <- function(...) stop("Not implemented")
-  for (f in c("mean")) {
-    f_env[[f]] <- fail
-  }
-
-  # Add the column references and make the mask
-  out <- new_data_mask(
-    new_environment(.data$selected_columns, parent = f_env),
-    f_env
-  )
-  # Then insert the data pronoun
-  # TODO: figure out what rlang::as_data_pronoun does/why we should use it
-  # (because if we do we get `Error: Can't modify the data pronoun` in mutate())
-  out$.data <- .data$selected_columns
-  out
-}
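-
-# Sketch of the mask in use (illustrative): in filter(tab, toupper(x) == "A"),
-# toupper() resolves to the Arrow wrapper in f_env, x resolves to the column
-# reference in selected_columns, and mean() hits fail() so arrow_eval() can
-# catch the error and fall back to R.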
-
-set_filters <- function(.data, expressions) {
-  if (length(expressions)) {
-    # expressions is a list of Expressions. AND them together and set them on .data
-    new_filter <- Reduce("&", expressions)
-    if (isTRUE(.data$filtered_rows)) {
-      # TRUE is default (i.e. no filter yet), so we don't need to & with it
-      .data$filtered_rows <- new_filter
-    } else {
-      .data$filtered_rows <- .data$filtered_rows & new_filter
-    }
-  }
-  .data
-}
-
-collect.arrow_dplyr_query <- function(x, as_data_frame = TRUE, ...) {
-  x <- ensure_group_vars(x)
-  x <- ensure_arrange_vars(x) # this sets x$temp_columns
-  # Pull only the selected rows and cols into R
-  if (query_on_dataset(x)) {
-    # See dataset.R for Dataset and Scanner(Builder) classes
-    tab <- Scanner$create(x)$ToTable()
-  } else {
-    # This is a Table or RecordBatch
-
-    # Filter and select the data referenced in selected columns
-    if (isTRUE(x$filtered_rows)) {
-      filter <- TRUE
-    } else {
-      filter <- eval_array_expression(x$filtered_rows, x$.data)
-    }
-    # TODO: shortcut if identical(names(x$.data), find_array_refs(c(x$selected_columns, x$temp_columns)))?
-    tab <- x$.data[
-      filter,
-      find_array_refs(c(x$selected_columns, x$temp_columns)),
-      keep_na = FALSE
-    ]
-    # Now evaluate those expressions on the filtered table
-    cols <- lapply(c(x$selected_columns, x$temp_columns), eval_array_expression, data = tab)
-    if (length(cols) == 0) {
-      tab <- tab[, integer(0)]
-    } else {
-      if (inherits(x$.data, "Table")) {
-        tab <- Table$create(!!!cols)
-      } else {
-        tab <- RecordBatch$create(!!!cols)
-      }
-    }
-  }
-  # Arrange rows
-  if (length(x$arrange_vars) > 0) {
-    tab <- tab[
-      tab$SortIndices(names(x$arrange_vars), x$arrange_desc),
-      names(x$selected_columns), # this omits x$temp_columns from the result
-      drop = FALSE
-    ]
-  }
-  if (as_data_frame) {
-    df <- as.data.frame(tab)
-    tab$invalidate()
-    restore_dplyr_features(df, x)
-  } else {
-    restore_dplyr_features(tab, x)
-  }
-}
-collect.ArrowTabular <- function(x, as_data_frame = TRUE, ...) {
-  if (as_data_frame) {
-    as.data.frame(x, ...)
-  } else {
-    x
-  }
-}
-collect.Dataset <- function(x, ...) dplyr::collect(arrow_dplyr_query(x), ...)
-
-compute.arrow_dplyr_query <- function(x, ...) dplyr::collect(x, as_data_frame = FALSE)
-compute.ArrowTabular <- function(x, ...) x
-compute.Dataset <- compute.arrow_dplyr_query
-
-ensure_group_vars <- function(x) {
-  if (inherits(x, "arrow_dplyr_query")) {
-    # Before pulling data from Arrow, make sure all group vars are in the projection
-    gv <- set_names(setdiff(dplyr::group_vars(x), names(x)))
-    if (length(gv)) {
-      # Add them back
-      x$selected_columns <- c(
-        x$selected_columns,
-        make_field_refs(gv, dataset = query_on_dataset(x))
-      )
-    }
-  }
-  x
-}
-
-ensure_arrange_vars <- function(x) {
-  # The arrange() operation is not performed until later, because:
-  # - It must be performed after mutate(), to enable sorting by new columns.
-  # - It should be performed after filter() and select(), for efficiency.
-  # However, we need users to be able to arrange() by columns and expressions
-  # that are *not* returned in the query result. To enable this, we must
-  # *temporarily* include these columns and expressions in the projection. We
-  # use x$temp_columns to store these. Later, after the arrange() operation has
-  # been performed, these are omitted from the result. This differs from the
-  # columns in x$group_by_vars which *are* returned in the result.
-  x$temp_columns <- x$arrange_vars[!names(x$arrange_vars) %in% names(x$selected_columns)]
-  x
-}
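-
-# For example (illustrative): in tab %>% arrange(desc(y)) %>% select(x),
-# the sort column y is no longer selected, so it rides along in
-# x$temp_columns through SortIndices() in collect() and is then dropped
-# from the result.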
-
-restore_dplyr_features <- function(df, query) {
-  # An arrow_dplyr_query holds some attributes that Arrow doesn't know about
-  # After calling collect(), make sure these features are carried over
-
-  grouped <- length(query$group_by_vars) > 0
-  renamed <- ncol(df) && !identical(names(df), names(query))
-  if (renamed) {
-    # In case variables were renamed, apply those names
-    names(df) <- names(query)
-  }
-  if (grouped) {
-    # Preserve groupings, if present
-    if (is.data.frame(df)) {
-      df <- dplyr::grouped_df(
-        df,
-        dplyr::group_vars(query),
-        drop = dplyr::group_by_drop_default(query)
-      )
-    } else {
-      # This is a Table, via compute() or collect(as_data_frame = FALSE)
-      df <- arrow_dplyr_query(df)
-      df$group_by_vars <- query$group_by_vars
-      df$drop_empty_groups <- query$drop_empty_groups
-    }
-  }
-  df
-}
-
-pull.arrow_dplyr_query <- function(.data, var = -1) {
-  .data <- arrow_dplyr_query(.data)
-  var <- vars_pull(names(.data), !!enquo(var))
-  .data$selected_columns <- set_names(.data$selected_columns[var], var)
-  dplyr::collect(.data)[[1]]
-}
-pull.Dataset <- pull.ArrowTabular <- pull.arrow_dplyr_query
-
-summarise.arrow_dplyr_query <- function(.data, ...) {
-  call <- match.call()
-  .data <- arrow_dplyr_query(.data)
-  if (query_on_dataset(.data)) {
-    not_implemented_for_dataset("summarize()")
-  }
-  exprs <- quos(...)
-  # Only retain the columns we need to do our aggregations
-  vars_to_keep <- unique(c(
-    unlist(lapply(exprs, all.vars)), # vars referenced in summarise
-    dplyr::group_vars(.data)             # vars needed for grouping
-  ))
-  .data <- dplyr::select(.data, vars_to_keep)
-  if (isTRUE(getOption("arrow.summarize", FALSE))) {
-    # Try the aggregation in Arrow; if it succeeds, return the result
-    out <- try(do_arrow_group_by(.data, ...), silent = TRUE)
-    if (inherits(out, "try-error")) {
-      return(abandon_ship(call, .data, format(out)))
-    } else {
-      return(out)
-    }
-  } else {
-    # Option not set: collect the data and do the aggregation in R
-    dplyr::summarise(dplyr::collect(.data), ...)
-  }
-}
-summarise.Dataset <- summarise.ArrowTabular <- summarise.arrow_dplyr_query
-
-do_arrow_group_by <- function(.data, ...) {
-  exprs <- quos(...)
-  mask <- arrow_mask(.data)
-  # Add aggregation wrappers to the arrow_mask
-  # (not ideal: this would overwrite any same-named object already in the mask)
-  mask$sum <- function(x, na.rm = FALSE) {
-    list(
-      fun = "sum",
-      data = x,
-      options = list(na.rm = na.rm)
-    )
-  }
-  results <- list()
-  for (i in seq_along(exprs)) {
-    # Iterate over the indices and not the names because names may be repeated
-    # (which overwrites the previous name)
-    new_var <- names(exprs)[i]
-    results[[new_var]] <- arrow_eval(exprs[[i]], mask)
-    if (inherits(results[[new_var]], "try-error")) {
-      msg <- paste('Expression', as_label(exprs[[i]]), 'not supported in Arrow')
-      stop(msg, call. = FALSE)
-    }
-    # Put it in the data mask too?
-    #mask[[new_var]] <- mask$.data[[new_var]] <- results[[new_var]]
-  }
-  # Now, from that, split out the array (expressions) and options
-  opts <- lapply(results, function(x) x[c("fun", "options")])
-  inputs <- lapply(results, function(x) eval_array_expression(x$data, .data$.data))
-  grouping_vars <- lapply(.data$group_by_vars, function(x) {
-    eval_array_expression(.data$selected_columns[[x]], .data$.data)
-  })
-  compute__GroupBy(inputs, grouping_vars, opts)
-}
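-
-# Shape of one aggregation produced by the mask above (illustrative):
-# in summarise(tab, total = sum(x, na.rm = TRUE)), results$total is
-#   list(fun = "sum", data = <expression for x>, options = list(na.rm = TRUE))
-# which is then split into `opts` and evaluated `inputs` for compute__GroupBy().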
-
-group_by.arrow_dplyr_query <- function(.data,
-                                       ...,
-                                       .add = FALSE,
-                                       add = .add,
-                                       .drop = dplyr::group_by_drop_default(.data)) {
-  .data <- arrow_dplyr_query(.data)
-  # ... can contain expressions (i.e. can add (or rename?) columns)
-  # Check for those (they show up as named expressions)
-  new_groups <- enquos(...)
-  new_groups <- new_groups[nzchar(names(new_groups))]
-  if (length(new_groups)) {
-    # Add them to the data
-    .data <- dplyr::mutate(.data, !!!new_groups)
-  }
-  if (".add" %in% names(formals(dplyr::group_by))) {
-    # dplyr >= 1.0
-    gv <- dplyr::group_by_prepare(.data, ..., .add = .add)$group_names
-  } else {
-    gv <- dplyr::group_by_prepare(.data, ..., add = add)$group_names
-  }
-  .data$group_by_vars <- gv
-  .data$drop_empty_groups <- ifelse(length(gv), .drop, dplyr::group_by_drop_default(.data))
-  .data
-}
-group_by.Dataset <- group_by.ArrowTabular <- group_by.arrow_dplyr_query
-
-groups.arrow_dplyr_query <- function(x) syms(dplyr::group_vars(x))
-groups.Dataset <- groups.ArrowTabular <- function(x) NULL
-
-group_vars.arrow_dplyr_query <- function(x) x$group_by_vars
-group_vars.Dataset <- group_vars.ArrowTabular <- function(x) NULL
-
-# the logical literal in the two functions below controls the default value of
-# the .drop argument to group_by()
-group_by_drop_default.arrow_dplyr_query <-
-  function(.tbl) .tbl$drop_empty_groups %||% TRUE
-group_by_drop_default.Dataset <- group_by_drop_default.ArrowTabular <-
-  function(.tbl) TRUE
-
-ungroup.arrow_dplyr_query <- function(x, ...) {
-  x$group_by_vars <- character()
-  x$drop_empty_groups <- NULL
-  x
-}
-ungroup.Dataset <- ungroup.ArrowTabular <- force
-
-mutate.arrow_dplyr_query <- function(.data,
-                                     ...,
-                                     .keep = c("all", "used", "unused", "none"),
-                                     .before = NULL,
-                                     .after = NULL) {
-  call <- match.call()
-  exprs <- quos(...)
-
-  .keep <- match.arg(.keep)
-  .before <- enquo(.before)
-  .after <- enquo(.after)
-
-  if (.keep %in% c("all", "unused") && length(exprs) == 0) {
-    # Nothing to do
-    return(.data)
-  }
-
-  .data <- arrow_dplyr_query(.data)
-
-  # Restrict the cases we support for now
-  if (length(dplyr::group_vars(.data)) > 0) {
-    # mutate() on a grouped dataset does calculations within groups
-    # This doesn't matter on scalar ops (arithmetic etc.) but it does
-    # for things with aggregations (e.g. subtracting the mean)
-    return(abandon_ship(call, .data, 'mutate() on grouped data not supported in Arrow'))
-  }
-
-  # Check for unnamed expressions and fix if any
-  unnamed <- !nzchar(names(exprs))
-  # Deparse and take the first element in case they're long expressions
-  names(exprs)[unnamed] <- map_chr(exprs[unnamed], as_label)
-
-  is_dataset <- query_on_dataset(.data)
-  mask <- arrow_mask(.data)
-  results <- list()
-  for (i in seq_along(exprs)) {
-    # Iterate over the indices and not the names because names may be repeated
-    # (which overwrites the previous name)
-    new_var <- names(exprs)[i]
-    results[[new_var]] <- arrow_eval(exprs[[i]], mask)
-    if (inherits(results[[new_var]], "try-error")) {
-      msg <- paste('Expression', as_label(exprs[[i]]), 'not supported in Arrow')
-      return(abandon_ship(call, .data, msg))
-    } else if (is_dataset &&
-               !inherits(results[[new_var]], "Expression") &&
-               !is.null(results[[new_var]])) {
-      # We need some wrapping to handle literal values
-      if (length(results[[new_var]]) != 1) {
-        msg <- paste0('In ', new_var, " = ", as_label(exprs[[i]]), ", only values of size one are recycled")
-        return(abandon_ship(call, .data, msg))
-      }
-      results[[new_var]] <- Expression$scalar(results[[new_var]])
-    }
-    # Put it in the data mask too
-    mask[[new_var]] <- mask$.data[[new_var]] <- results[[new_var]]
-  }
-
-  old_vars <- names(.data$selected_columns)
-  # Note that this is names(exprs) not names(results):
-  # if results$new_var is NULL, that means we are supposed to remove it
-  new_vars <- names(exprs)
-
-  # Assign the new columns into the .data$selected_columns
-  for (new_var in new_vars) {
-    .data$selected_columns[[new_var]] <- results[[new_var]]
-  }
-
-  # Deduplicate new_vars and remove NULL columns from new_vars
-  new_vars <- intersect(new_vars, names(.data$selected_columns))
-
-  # Respect .before and .after
-  if (!quo_is_null(.before) || !quo_is_null(.after)) {
-    new <- setdiff(new_vars, old_vars)
-    .data <- dplyr::relocate(.data, !!new, .before = !!.before, .after = !!.after)
-  }
-
-  # Respect .keep
-  if (.keep == "none") {
-    .data$selected_columns <- .data$selected_columns[new_vars]
-  } else if (.keep != "all") {
-    # "used" or "unused"
-    used_vars <- unlist(lapply(exprs, all.vars), use.names = FALSE)
-    if (.keep == "used") {
-      .data$selected_columns[setdiff(old_vars, used_vars)] <- NULL
-    } else {
-      # "unused"
-      .data$selected_columns[intersect(old_vars, used_vars)] <- NULL
-    }
-  }
-  # Even if "none", we still keep group vars
-  ensure_group_vars(.data)
-}
-mutate.Dataset <- mutate.ArrowTabular <- mutate.arrow_dplyr_query
-
-transmute.arrow_dplyr_query <- function(.data, ...) dplyr::mutate(.data, ..., .keep = "none")
-transmute.Dataset <- transmute.ArrowTabular <- transmute.arrow_dplyr_query
-
-# Helper to handle unsupported dplyr features
-# * For Table/RecordBatch, we collect() and then call the dplyr method in R
-# * For Dataset, we just error
-abandon_ship <- function(call, .data, msg = NULL) {
-  dplyr_fun_name <- sub("^(.*?)\\..*", "\\1", as.character(call[[1]]))
-  if (query_on_dataset(.data)) {
-    if (is.null(msg)) {
-      # Default message: function not implemented
-      not_implemented_for_dataset(paste0(dplyr_fun_name, "()"))
-    } else {
-      stop(msg, "\nCall collect() first to pull data into R.", call. = FALSE)
-    }
-  }
-
-  # else, collect and call dplyr method
-  if (!is.null(msg)) {
-    warning(msg, "; pulling data into R", immediate. = TRUE, call. = FALSE)
-  }
-  call$.data <- dplyr::collect(.data)
-  call[[1]] <- get(dplyr_fun_name, envir = asNamespace("dplyr"))
-  eval.parent(call, 2)
-}
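-
-# For example (illustrative): an unsupported mutate() on a Table warns
-# "...; pulling data into R", collects to a data.frame, and re-dispatches
-# the original call to dplyr::mutate(); the same call on a Dataset stops
-# instead of collecting.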
-
-arrange.arrow_dplyr_query <- function(.data, ..., .by_group = FALSE) {
-  call <- match.call()
-  exprs <- quos(...)
-  if (.by_group) {
-  # when the data is grouped and .by_group is TRUE, order the result by
-    # the grouping columns first
-    exprs <- c(quos(!!!dplyr::groups(.data)), exprs)
-  }
-  if (length(exprs) == 0) {
-    # Nothing to do
-    return(.data)
-  }
-  .data <- arrow_dplyr_query(.data)
-  # find and remove any dplyr::desc() and tidy-eval
-  # the arrange expressions inside an Arrow data_mask
-  sorts <- vector("list", length(exprs))
-  descs <- logical(0)
-  mask <- arrow_mask(.data)
-  for (i in seq_along(exprs)) {
-    x <- find_and_remove_desc(exprs[[i]])
-    exprs[[i]] <- x[["quos"]]
-    sorts[[i]] <- arrow_eval(exprs[[i]], mask)
-    if (inherits(sorts[[i]], "try-error")) {
-      msg <- paste('Expression', as_label(exprs[[i]]), 'not supported in Arrow')
-      return(abandon_ship(call, .data, msg))
-    }
-    names(sorts)[i] <- as_label(exprs[[i]])
-    descs[i] <- x[["desc"]]
-  }
-  .data$arrange_vars <- c(sorts, .data$arrange_vars)
-  .data$arrange_desc <- c(descs, .data$arrange_desc)
-  .data
-}
-arrange.Dataset <- arrange.ArrowTabular <- arrange.arrow_dplyr_query
-
-# Helper to handle desc() in arrange()
-# * Takes a quosure as input
-# * Returns a list with two elements:
-#   1. The quosure with any wrapping parentheses and desc() removed
-#   2. A logical value indicating whether desc() was found
-# * Performs some other validation
-find_and_remove_desc <- function(quosure) {
-  expr <- quo_get_expr(quosure)
-  descending <- FALSE
-  if (length(all.vars(expr)) < 1L) {
-    stop(
-      "Expression in arrange() does not contain any field names: ",
-      deparse(expr),
-      call. = FALSE
-    )
-  }
-  # Use a while loop to remove any number of nested pairs of enclosing
-  # parentheses and any number of nested desc() calls. In the case of multiple
-  # nested desc() calls, each one toggles the sort order.
-  while (identical(typeof(expr), "language") && is.call(expr)) {
-    if (identical(expr[[1]], quote(`(`))) {
-      # remove enclosing parentheses
-      expr <- expr[[2]]
-    } else if (identical(expr[[1]], quote(desc))) {
-      # remove desc() and toggle descending
-      expr <- expr[[2]]
-      descending <- !descending
-    } else {
-      break
-    }
-  }
-  return(
-    list(
-      quos = quo_set_expr(quosure, expr),
-      desc = descending
-    )
-  )
-}
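-
-# For example (illustrative):
-#   find_and_remove_desc(quo(desc((desc(x)))))
-#   # -> list(quos = quo(x), desc = FALSE): each nested desc() toggles the
-#   # sort order and enclosing parentheses are stripped along the way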
-
-query_on_dataset <- function(x) inherits(x$.data, "Dataset")
-
-not_implemented_for_dataset <- function(method) {
-  stop(
-    method, " is not currently implemented for Arrow Datasets. ",
-    "Call collect() first to pull data into R.",
-    call. = FALSE
-  )
-}
diff --git a/r/R/enums.R b/r/R/enums.R
deleted file mode 100644
index ae44ccf..0000000
--- a/r/R/enums.R
+++ /dev/null
@@ -1,142 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @export
-`print.arrow-enum` <- function(x, ...) {
-  NextMethod()
-}
-
-enum <- function(class, ..., .list = list(...)) {
-  structure(
-    .list,
-    class = c(class, "arrow-enum")
-  )
-}
-
-#' Arrow enums
-#' @name enums
-#' @export
-#' @keywords internal
-TimeUnit <- enum("TimeUnit::type",
-  SECOND = 0L, MILLI = 1L, MICRO = 2L, NANO = 3L
-)
-
-#' @rdname enums
-#' @export
-DateUnit <- enum("DateUnit", DAY = 0L, MILLI = 1L)
-
-#' @rdname enums
-#' @export
-Type <- enum("Type::type",
-  "NA" = 0L,
-  BOOL = 1L,
-  UINT8 = 2L,
-  INT8 = 3L,
-  UINT16 = 4L,
-  INT16 = 5L,
-  UINT32 = 6L,
-  INT32 = 7L,
-  UINT64 = 8L,
-  INT64 = 9L,
-  HALF_FLOAT = 10L,
-  FLOAT = 11L,
-  DOUBLE = 12L,
-  STRING = 13L,
-  BINARY = 14L,
-  FIXED_SIZE_BINARY = 15L,
-  DATE32 = 16L,
-  DATE64 = 17L,
-  TIMESTAMP = 18L,
-  TIME32 = 19L,
-  TIME64 = 20L,
-  INTERVAL_MONTHS = 21L,
-  INTERVAL_DAY_TIME = 22L,
-  DECIMAL = 23L,
-  DECIMAL256 = 24L,
-  LIST = 25L,
-  STRUCT = 26L,
-  SPARSE_UNION = 27L,
-  DENSE_UNION = 28L,
-  DICTIONARY = 29L,
-  MAP = 30L,
-  EXTENSION = 31L,
-  FIXED_SIZE_LIST = 32L,
-  DURATION = 33L,
-  LARGE_STRING = 34L,
-  LARGE_BINARY = 35L,
-  LARGE_LIST = 36L
-)
-
-#' @rdname enums
-#' @export
-StatusCode <- enum("StatusCode",
-  OK = 0L, OutOfMemory = 1L, KeyError = 2L, TypeError = 3L,
-  Invalid = 4L, IOError = 5L, CapacityError = 6L, IndexError = 7L,
-  UnknownError = 9L, NotImplemented = 10L, SerializationError = 11L,
-  PythonError = 12L, RError = 13L,
-  PlasmaObjectExists = 20L, PlasmaObjectNotFound = 21L,
-  PlasmaStoreFull = 22L, PlasmaObjectAlreadySealed = 23L
-)
-
-#' @rdname enums
-#' @export
-FileMode <- enum("FileMode",
-  READ = 0L, WRITE = 1L, READWRITE = 2L
-)
-
-#' @rdname enums
-#' @export
-MessageType <- enum("MessageType",
-  NONE = 0L, SCHEMA = 1L, DICTIONARY_BATCH = 2L, RECORD_BATCH = 3L, TENSOR = 4L
-)
-
-#' @rdname enums
-#' @export
-CompressionType <- enum("Compression::type",
-  UNCOMPRESSED = 0L, SNAPPY = 1L, GZIP = 2L, BROTLI = 3L, ZSTD = 4L, LZ4 = 5L,
-  LZ4_FRAME = 6L, LZO = 7L, BZ2 = 8L
-)
-
-#' @export
-#' @rdname enums
-FileType <- enum("FileType",
-  NotFound = 0L, Unknown = 1L, File = 2L, Directory = 3L
-)
-
-#' @export
-#' @rdname enums
-ParquetVersionType <- enum("ParquetVersionType",
-  PARQUET_1_0 = 0L, PARQUET_2_0 = 1L
-)
-
-#' @export
-#' @rdname enums
-MetadataVersion <- enum("MetadataVersion",
-  V1 = 0L, V2 = 1L, V3 = 2L, V4 = 3L, V5 = 4L
-)
-
-#' @export
-#' @rdname enums
-QuantileInterpolation <- enum("QuantileInterpolation",
-  LINEAR = 0L, LOWER = 1L, HIGHER = 2L, NEAREST = 3L, MIDPOINT = 4L
-)
-
-#' @export
-#' @rdname enums
-NullEncodingBehavior <- enum("NullEncodingBehavior",
-  ENCODE = 0L, MASK = 1L
-)
diff --git a/r/R/expression.R b/r/R/expression.R
deleted file mode 100644
index b3fdd52..0000000
--- a/r/R/expression.R
+++ /dev/null
@@ -1,346 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrowExports.R
-
-array_expression <- function(FUN,
-                             ...,
-                             args = list(...),
-                             options = empty_named_list()) {
-  structure(
-    list(
-      fun = FUN,
-      args = args,
-      options = options
-    ),
-    class = "array_expression"
-  )
-}
-
-#' @export
-Ops.ArrowDatum <- function(e1, e2) {
-  if (.Generic == "!") {
-    eval_array_expression(build_array_expression(.Generic, e1))
-  } else if (.Generic %in% names(.array_function_map)) {
-    eval_array_expression(build_array_expression(.Generic, e1, e2))
-  } else {
-    stop(paste0("Unsupported operation on `", class(e1)[1L], "` : "), .Generic, call. = FALSE)
-  }
-}
-
-#' @export
-Ops.array_expression <- function(e1, e2) {
-  if (.Generic == "!") {
-    build_array_expression(.Generic, e1)
-  } else {
-    build_array_expression(.Generic, e1, e2)
-  }
-}
-
-build_array_expression <- function(FUN,
-                                   ...,
-                                   args = list(...),
-                                   options = empty_named_list()) {
-  if (FUN == "-" && length(args) == 1L) {
-    # Unary -, i.e. negate the single argument
-    if (inherits(args[[1]], c("ArrowObject", "array_expression"))) {
-      # Make it be 0 - arg
-      # TODO(ARROW-11950): do this in C++ compute
-      args <- list(0L, args[[1]])
-    } else {
-      # A bare R value: base R's unary minus applies directly
-      return(-args[[1]])
-    }
-  }
-  args <- lapply(args, .wrap_arrow, FUN)
-
-  # In Arrow, "divide" is one function, which does integer division on
-  # integer inputs and floating-point division on floats
-  if (FUN == "/") {
-    # TODO: omg so many ways it's wrong to assume these types
-    args <- lapply(args, cast_array_expression, float64())
-  } else if (FUN == "%/%") {
-    # In R, integer division works like floor(float division)
-    out <- build_array_expression("/", args = args, options = options)
-    return(cast_array_expression(out, int32(), allow_float_truncate = TRUE))
-  } else if (FUN == "%%") {
-    # {e1 - e2 * ( e1 %/% e2 )}
-    # ^^^ form doesn't work because Ops.Array evaluates eagerly,
-    # but we can build that up
-    quotient <- build_array_expression("%/%", args = args)
-    base <- build_array_expression("*", quotient, args[[2]])
-    # cast so that the result has the same type as e1
-    # (autocasting only applies to scalars)
-    base <- cast_array_expression(base, args[[1]]$type)
-    return(build_array_expression("-", args[[1]], base))
-  }
-
-  array_expression(.array_function_map[[FUN]] %||% FUN, args = args, options = options)
-}
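-
-# Worked example of the %% rewrite above (illustrative), for 7L %% 3L:
-#   quotient: 7 %/% 3  -> float division, truncating cast to int32 -> 2
-#   base:     2 * 3    -> 6, cast back to the type of the first operand
-#   result:   7 - 6    -> 1, matching R's modulo for positive operands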
-
-cast_array_expression <- function(x, to_type, safe = TRUE, ...) {
-  opts <- list(
-    to_type = to_type,
-    allow_int_overflow = !safe,
-    allow_time_truncate = !safe,
-    allow_float_truncate = !safe
-  )
-  array_expression("cast", x, options = modifyList(opts, list(...)))
-}
-
-.wrap_arrow <- function(arg, fun) {
-  if (!inherits(arg, c("ArrowObject", "array_expression"))) {
-    # TODO: Array$create if lengths are equal?
-    # TODO: these kernels should autocast like the dataset ones do (e.g. int vs. float)
-    if (fun == "%in%") {
-      arg <- Array$create(arg)
-    } else {
-      arg <- Scalar$create(arg)
-    }
-  }
-  arg
-}
-
-.unary_function_map <- list(
-  "!" = "invert",
-  "as.factor" = "dictionary_encode",
-  "is.na" = "is_null",
-  "is.nan" = "is_nan",
-  # nchar is defined in dplyr.R because it is more complex
-  # "nchar" = "utf8_length",
-  "tolower" = "utf8_lower",
-  "toupper" = "utf8_upper",
-  # stringr spellings of those
-  "str_length" = "utf8_length",
-  "str_to_lower" = "utf8_lower",
-  "str_to_upper" = "utf8_upper"
-  # str_trim is defined in dplyr.R
-)
-
-.binary_function_map <- list(
-  "==" = "equal",
-  "!=" = "not_equal",
-  ">" = "greater",
-  ">=" = "greater_equal",
-  "<" = "less",
-  "<=" = "less_equal",
-  "&" = "and_kleene",
-  "|" = "or_kleene",
-  "+" = "add_checked",
-  "-" = "subtract_checked",
-  "*" = "multiply_checked",
-  "/" = "divide_checked",
-  "%/%" = "divide_checked",
-  # we don't actually use divide_checked with `%%`, rather it is rewritten to
-  # use %/% above.
-  "%%" = "divide_checked",
-  "^" = "power_checked",
-  "%in%" = "is_in_meta_binary"
-)
-
-.array_function_map <- c(.unary_function_map, .binary_function_map)
-
-eval_array_expression <- function(x, data = NULL) {
-  if (!is.null(data)) {
-    x <- bind_array_refs(x, data)
-  }
-  if (!inherits(x, "array_expression")) {
-    # Nothing to evaluate
-    return(x)
-  }
-  x$args <- lapply(x$args, function (a) {
-    if (inherits(a, "array_expression")) {
-      eval_array_expression(a)
-    } else {
-      a
-    }
-  })
-  if (x$fun == "is_in_meta_binary" && inherits(x$args[[2]], "Scalar")) {
-    x$args[[2]] <- Array$create(x$args[[2]])
-  }
-  call_function(x$fun, args = x$args, options = x$options %||% empty_named_list())
-}
-
-find_array_refs <- function(x) {
-  if (identical(x$fun, "array_ref")) {
-    out <- x$args$field_name
-  } else {
-    out <- lapply(x$args, find_array_refs)
-  }
-  unlist(out)
-}
-
-# Take an array_expression and replace array_refs with arrays/chunkedarrays from data
-bind_array_refs <- function(x, data) {
-  if (inherits(x, "array_expression")) {
-    if (identical(x$fun, "array_ref")) {
-      x <- data[[x$args$field_name]]
-    } else {
-      x$args <- lapply(x$args, bind_array_refs, data)
-    }
-  }
-  x
-}
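-
-# For example (illustrative): given
-#   expr <- array_expression("add_checked",
-#                            array_expression("array_ref", field_name = "x"), 1L)
-# bind_array_refs(expr, tab) substitutes tab's "x" column (a ChunkedArray)
-# for the array_ref node before the expression tree is evaluated.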
-
-#' @export
-is.na.array_expression <- function(x) array_expression("is.na", x)
-
-#' @export
-as.vector.array_expression <- function(x, ...) {
-  as.vector(eval_array_expression(x))
-}
-
-#' @export
-print.array_expression <- function(x, ...) {
-  cat(.format_array_expression(x), "\n", sep = "")
-  invisible(x)
-}
-
-.format_array_expression <- function(x) {
-  printed_args <- map_chr(x$args, function(arg) {
-    if (inherits(arg, "Scalar")) {
-      deparse(as.vector(arg))
-    } else if (inherits(arg, "ArrowObject")) {
-      paste0("<", class(arg)[1], ">")
-    } else if (inherits(arg, "array_expression")) {
-      .format_array_expression(arg)
-    } else {
-      # Should not happen
-      deparse(arg)
-    }
-  })
-  if (identical(x$fun, "array_ref")) {
-    x$args$field_name
-  } else {
-    # Prune this for readability
-    function_name <- sub("_kleene", "", x$fun)
-    paste0(function_name, "(", paste(printed_args, collapse = ", "), ")")
-  }
-}
-
-###########
-
-#' Arrow expressions
-#'
-#' @description
-#' `Expression`s are used to define filter logic for passing to a [Dataset]
-#' [Scanner].
-#'
-#' `Expression$scalar(x)` constructs an `Expression` which always evaluates to
-#' the provided scalar (length-1) R value.
-#'
-#' `Expression$field_ref(name)` is used to construct an `Expression` which
-#' evaluates to the named column in the `Dataset` against which it is evaluated.
-#'
-#' `Expression$create(function_name, ..., options)` builds a function-call
-#' `Expression` containing one or more `Expression`s.
-#' @name Expression
-#' @rdname Expression
-#' @export
-Expression <- R6Class("Expression", inherit = ArrowObject,
-  public = list(
-    ToString = function() dataset___expr__ToString(self),
-    cast = function(to_type, safe = TRUE, ...) {
-      opts <- list(
-        to_type = to_type,
-        allow_int_overflow = !safe,
-        allow_time_truncate = !safe,
-        allow_float_truncate = !safe
-      )
-      Expression$create("cast", self, options = modifyList(opts, list(...)))
-    }
-  ),
-  active = list(
-    field_name = function() dataset___expr__get_field_ref_name(self)
-  )
-)
-Expression$create <- function(function_name,
-                              ...,
-                              args = list(...),
-                              options = empty_named_list()) {
-  assert_that(is.string(function_name))
-  dataset___expr__call(function_name, args, options)
-}
-Expression$field_ref <- function(name) {
-  assert_that(is.string(name))
-  dataset___expr__field_ref(name)
-}
-Expression$scalar <- function(x) {
-  dataset___expr__scalar(Scalar$create(x))
-}
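-
-# For example (illustrative), a filter equivalent to x > 5 for a Scanner:
-#   Expression$create("greater", Expression$field_ref("x"), Expression$scalar(5L))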
-
-build_dataset_expression <- function(FUN,
-                                     ...,
-                                     args = list(...),
-                                     options = empty_named_list()) {
-  if (FUN == "-" && length(args) == 1L) {
-    # Unary -, i.e. make it negative
-    if (inherits(args[[1]], c("ArrowObject", "Expression"))) {
-      # TODO(ARROW-11950): do this in C++ compute
-      args <- list(0L, args[[1]])
-    } else {
-      # A bare R value: base R's unary minus applies directly
-      return(-args[[1]])
-    }
-  }
-  if (FUN == "%in%") {
-    # Special-case %in%, which is different from the Array function name
-    expr <- Expression$create("is_in", args[[1]],
-      options = list(
-        # If args[[2]] is already an Arrow object (like a scalar),
-        # this wouldn't work
-        value_set = Array$create(args[[2]]),
-        skip_nulls = TRUE
-      )
-    )
-  } else {
-    args <- lapply(args, function(x) {
-      if (!inherits(x, "Expression")) {
-        x <- Expression$scalar(x)
-      }
-      x
-    })
-
-    # In Arrow, "divide" is one function, which does integer division on
-    # integer inputs and floating-point division on floats
-    if (FUN == "/") {
-      # TODO: omg so many ways it's wrong to assume these types
-      args <- lapply(args, function(x) x$cast(float64()))
-    } else if (FUN == "%/%") {
-      # In R, integer division works like floor(float division)
-      out <- build_dataset_expression("/", args = args)
-      return(out$cast(int32(), allow_float_truncate = TRUE))
-    } else if (FUN == "%%") {
-      return(args[[1]] - args[[2]] * ( args[[1]] %/% args[[2]] ))
-    }
-
-    expr <- Expression$create(.array_function_map[[FUN]] %||% FUN, args = args, options = options)
-  }
-  expr
-}
-
-#' @export
-Ops.Expression <- function(e1, e2) {
-  if (.Generic == "!") {
-    build_dataset_expression(.Generic, e1)
-  } else {
-    build_dataset_expression(.Generic, e1, e2)
-  }
-}
-
-#' @export
-is.na.Expression <- function(x) Expression$create("is_null", x)
diff --git a/r/R/feather.R b/r/R/feather.R
deleted file mode 100644
index a978110..0000000
--- a/r/R/feather.R
+++ /dev/null
@@ -1,221 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Write data in the Feather format
-#'
-#' Feather provides binary columnar serialization for data frames.
-#' It is designed to make reading and writing data frames efficient,
-#' and to make sharing data across data analysis languages easy.
-#' This function writes both the original, limited specification of the format
-#' and the version 2 specification, which is the Apache Arrow IPC file format.
-#'
-#' @param x `data.frame`, [RecordBatch], or [Table]
-#' @param sink A string file path, URI, or [OutputStream], or path in a file
-#' system (`SubTreeFileSystem`)
-#' @param version integer Feather file version. Version 2 is the current default.
-#' Version 1 is the more limited legacy format.
-#' @param chunk_size For V2 files, the number of rows that each chunk of data
-#' should have in the file. Use a smaller `chunk_size` when you need faster
-#' random row access. Default is 64K. This option is not supported for V1.
-#' @param compression Name of compression codec to use, if any. Default is
-#' "lz4" if LZ4 is available in your build of the Arrow C++ library, otherwise
-#' "uncompressed". "zstd" is the other available codec and generally has better
-#' compression ratios in exchange for slower read and write performance.
-#' See [codec_is_available()]. This option is not supported for V1.
-#' @param compression_level If `compression` is "zstd", you may
-#' specify an integer compression level. If omitted, the compression codec's
-#' default compression level is used.
-#'
-#' @return The input `x`, invisibly. Note that if `sink` is an [OutputStream],
-#' the stream will be left open.
-#' @export
-#' @seealso [RecordBatchWriter] for lower-level access to writing Arrow IPC data.
-#' @seealso [Schema] for information about schemas and metadata handling.
-#' @examples
-#' \donttest{
-#' tf <- tempfile()
-#' on.exit(unlink(tf))
-#' write_feather(mtcars, tf)
-#' }
-#' @include arrow-package.R
-write_feather <- function(x,
-                          sink,
-                          version = 2,
-                          chunk_size = 65536L,
-                          compression = c("default", "lz4", "uncompressed", "zstd"),
-                          compression_level = NULL) {
-  # Handle and validate options before touching data
-  version <- as.integer(version)
-  assert_that(version %in% 1:2)
-  compression <- match.arg(compression)
-  chunk_size <- as.integer(chunk_size)
-  assert_that(chunk_size > 0)
-  if (compression == "default") {
-    if (version == 2 && codec_is_available("lz4")) {
-      compression <- "lz4"
-    } else {
-      compression <- "uncompressed"
-    }
-  }
-  if (is.null(compression_level)) {
-    # Use -1 as sentinel for "default"
-    compression_level <- -1L
-  }
-  compression_level <- as.integer(compression_level)
-  # Now make sure that options make sense together
-  if (version == 1) {
-    if (chunk_size != 65536L) {
-      stop("Feather version 1 does not support the 'chunk_size' option", call. = FALSE)
-    }
-    if (compression != "uncompressed") {
-      stop("Feather version 1 does not support the 'compression' option", call. = FALSE)
-    }
-    if (compression_level != -1L) {
-      stop("Feather version 1 does not support the 'compression_level' option", call. = FALSE)
-    }
-  }
-  if (compression != "zstd" && compression_level != -1L) {
-    stop("Can only specify a 'compression_level' when 'compression' is 'zstd'", call. = FALSE)
-  }
-  # Finally, add 1 to version because the underlying library numbers the formats 2 (V1) and 3 (V2)
-  version <- version + 1L
-
-  # "lz4" is the convenience
-  if (compression == "lz4") {
-     compression <- "lz4_frame"
-  }
-
-  compression <- compression_from_name(compression)
-
-  x_out <- x
-  if (is.data.frame(x) || inherits(x, "RecordBatch")) {
-    x <- Table$create(x)
-  }
-  assert_is(x, "Table")
-
-  if (!inherits(sink, "OutputStream")) {
-    sink <- make_output_stream(sink)
-    on.exit(sink$close())
-  }
-  ipc___WriteFeather__Table(sink, x, version, chunk_size, compression, compression_level)
-  invisible(x_out)
-}
-
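A usage sketch for write_feather() as defined above, guarding the zstd branch
with codec_is_available() since compression support depends on the build:

    tf <- tempfile(fileext = ".feather")
    # V2 file (the default); V1 accepts none of the chunk_size/compression options
    if (codec_is_available("zstd")) {
      write_feather(mtcars, tf, compression = "zstd", compression_level = 3)
    } else {
      write_feather(mtcars, tf)  # falls back to lz4 or uncompressed
    }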
-#' Read a Feather file
-#'
-#' Feather provides binary columnar serialization for data frames.
-#' It is designed to make reading and writing data frames efficient,
-#' and to make sharing data across data analysis languages easy.
-#' This function reads both the original, limited specification of the format
-#' and the version 2 specification, which is the Apache Arrow IPC file format.
-#'
-#' @inheritParams read_ipc_stream
-#' @inheritParams read_delim_arrow
-#' @param ... additional parameters, passed to [make_readable_file()].
-#'
-#' @return A `data.frame` if `as_data_frame` is `TRUE` (the default), or an
-#' Arrow [Table] otherwise
-#'
-#' @export
-#' @seealso [FeatherReader] and [RecordBatchReader] for lower-level access to reading Arrow IPC data.
-#' @examples
-#' \donttest{
-#' tf <- tempfile()
-#' on.exit(unlink(tf))
-#' write_feather(mtcars, tf)
-#' df <- read_feather(tf)
-#' dim(df)
-#' # Can select columns
-#' df <- read_feather(tf, col_select = starts_with("d"))
-#' }
-read_feather <- function(file, col_select = NULL, as_data_frame = TRUE, ...) {
-  if (!inherits(file, "RandomAccessFile")) {
-    file <- make_readable_file(file, ...)
-    on.exit(file$close())
-  }
-  reader <- FeatherReader$create(file)
-
-  col_select <- enquo(col_select)
-  columns <- if (!quo_is_null(col_select)) {
-    vars_select(names(reader), !!col_select)
-  }
-
-  out <- tryCatch(
-    reader$Read(columns),
-    error = read_compressed_error
-  )
-
-  if (isTRUE(as_data_frame)) {
-    out <- as.data.frame(out)
-  }
-  out
-}
-
-#' @title FeatherReader class
-#' @rdname FeatherReader
-#' @name FeatherReader
-#' @docType class
-#' @usage NULL
-#' @format NULL
-#' @description This class enables you to interact with Feather files. Create
-#' one to connect to a file or other InputStream, and call `Read()` on it to
-#' make an `arrow::Table`. See its usage in [`read_feather()`].
-#'
-#' @section Factory:
-#'
-#' The `FeatherReader$create()` factory method instantiates the object and
-#' takes the following argument:
-#'
-#' - `file` an Arrow file connection object inheriting from `RandomAccessFile`.
-#'
-#' @section Methods:
-#'
-#' - `$Read(columns)`: Returns a `Table` of the selected columns, where
-#'   `columns` is a vector of integer indices
-#' - `$column_names`: Active binding, returns the column names in the Feather file
-#' - `$schema`: Active binding, returns the schema of the Feather file
-#' - `$version`: Active binding, returns `1` or `2`, according to the Feather
-#'   file version
-#'
-#' @export
-#' @include arrow-package.R
-FeatherReader <- R6Class("FeatherReader", inherit = ArrowObject,
-  public = list(
-    Read = function(columns) {
-      ipc___feather___Reader__Read(self, columns)
-    },
-    print = function(...) {
-      cat("FeatherReader:\n")
-      print(self$schema)
-      invisible(self)
-    }
-  ),
-  active = list(
-    # the underlying library numbers the formats 2 (V1) and 3 (V2)
-    version = function() ipc___feather___Reader__version(self) - 1L,
-    column_names = function() names(self$schema),
-    schema = function() ipc___feather___Reader__schema(self)
-  )
-)
-
-#' @export
-names.FeatherReader <- function(x) x$column_names
-
-FeatherReader$create <- function(file) {
-  assert_is(file, "RandomAccessFile")
-  ipc___feather___Reader__Open(file)
-}
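The same read, sketched through the lower-level FeatherReader that
read_feather() wraps; ReadableFile is defined in io.R further down this diff:

    f <- ReadableFile$create(tf)   # tf from the write_feather() sketch above
    reader <- FeatherReader$create(f)
    reader$version                 # 2 for a V2 file
    names(reader)                  # column names, via the names() method above
    tab <- reader$Read(c(1L, 3L))  # Table of the first and third columns
    f$close()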
diff --git a/r/R/field.R b/r/R/field.R
deleted file mode 100644
index 33549d3..0000000
--- a/r/R/field.R
+++ /dev/null
@@ -1,82 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-package.R
-#' @title Field class
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @description `field()` lets you create an `arrow::Field` that maps a
-#' [DataType][data-type] to a column name. Fields are contained in
-#' [Schemas][Schema].
-#' @section Methods:
-#'
-#' - `f$ToString()`: convert to a string
-#' - `f$Equals(other)`: test for equality. More naturally called as `f == other`
-#'
-#' @rdname Field
-#' @name Field
-#' @export
-Field <- R6Class("Field", inherit = ArrowObject,
-  public = list(
-    ToString = function() {
-      prettier_dictionary_type(Field__ToString(self))
-    },
-    Equals = function(other, ...) {
-      inherits(other, "Field") && Field__Equals(self, other)
-    }
-  ),
-
-  active = list(
-    name = function() {
-      Field__name(self)
-    },
-    nullable = function() {
-      Field__nullable(self)
-    },
-    type = function() {
-      Field__type(self)
-    }
-  )
-)
-Field$create <- function(name, type, metadata) {
-  assert_that(inherits(name, "character"), length(name) == 1L)
-  type <- as_type(type, name)
-  assert_that(missing(metadata), msg = "metadata= is currently ignored")
-  Field__initialize(enc2utf8(name), type, TRUE)
-}
-
-#' @param name field name
-#' @param type logical type, instance of [DataType]
-#' @param metadata currently ignored
-#'
-#' @examples
-#' \donttest{
-#' field("x", int32())
-#' }
-#' @rdname Field
-#' @export
-field <- Field$create
-
-.fields <- function(.list) {
-  if (length(.list)) {
-    assert_that(!is.null(nms <- names(.list)))
-    map2(nms, .list, field)
-  } else {
-    list()
-  }
-}
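A small sketch of these helpers; int32() and utf8() are the package's type
constructors:

    f <- field("x", int32())
    f$name                                   # "x"
    f == field("x", int32())                 # TRUE, dispatching to Equals
    .fields(list(x = int32(), y = utf8()))   # a list of two Fields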
diff --git a/r/R/filesystem.R b/r/R/filesystem.R
deleted file mode 100644
index 3a624fd..0000000
--- a/r/R/filesystem.R
+++ /dev/null
@@ -1,510 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-package.R
-#' @title FileSystem entry info
-#' @usage NULL
-#' @format NULL
-#'
-#' @section Methods:
-#'
-#' - `base_name()` : The file base name (component after the last directory
-#'    separator).
-#' - `extension()` : The file extension
-#'
-#' @section Active bindings:
-#'
-#' - `$type`: The file type
-#' - `$path`: The full file path in the filesystem
-#' - `$size`: The size in bytes, if available.  Only regular files are
-#'    guaranteed to have a size.
-#' - `$mtime`: The time of last modification, if available.
-#'
-#' @rdname FileInfo
-#' @export
-FileInfo <- R6Class("FileInfo",
-  inherit = ArrowObject,
-  public = list(
-    base_name = function() fs___FileInfo__base_name(self),
-    extension = function() fs___FileInfo__extension(self)
-  ),
-  active = list(
-    type = function(type) {
-      if (missing(type)) {
-        fs___FileInfo__type(self)
-      } else {
-        fs___FileInfo__set_type(self, type)
-      }
-    },
-    path = function(path) {
-      if (missing(path)) {
-        fs___FileInfo__path(self)
-      } else {
-        invisible(fs___FileInfo__set_path(self, path))
-      }
-    },
-
-    size = function(size) {
-      if (missing(size)) {
-        fs___FileInfo__size(self)
-      } else {
-        invisible(fs___FileInfo__set_size(self, size))
-      }
-    },
-
-    mtime = function(time) {
-      if (missing(time)) {
-        fs___FileInfo__mtime(self)
-      } else {
-        if (!inherits(time, "POSIXct") || length(time) != 1L) {
-          abort("invalid time")
-        }
-        invisible(fs___FileInfo__set_mtime(self, time))
-      }
-    }
-  )
-)
-
-#' @title file selector
-#' @format NULL
-#'
-#' @section Factory:
-#'
-#' The `$create()` factory method instantiates a `FileSelector` given the 3 fields
-#' described below.
-#'
-#' @section Fields:
-#'
-#' - `base_dir`: The directory in which to select files. If the path exists but
-#'    doesn't point to a directory, this should be an error.
-#' - `allow_not_found`: The behavior if `base_dir` doesn't exist in the
-#'    filesystem. If `FALSE`, an error is returned.  If `TRUE`, an empty
-#'    selection is returned
-#' - `recursive`: Whether to recurse into subdirectories.
-#'
-#' @rdname FileSelector
-#' @export
-FileSelector <- R6Class("FileSelector",
-  inherit = ArrowObject,
-  active = list(
-    base_dir = function() fs___FileSelector__base_dir(self),
-    allow_not_found = function() fs___FileSelector__allow_not_found(self),
-    recursive = function() fs___FileSelector__recursive(self)
-  )
-)
-
-FileSelector$create <- function(base_dir, allow_not_found = FALSE, recursive = FALSE) {
-  fs___FileSelector__create(clean_path_rel(base_dir), allow_not_found, recursive)
-}
-
-#' @title FileSystem classes
-#' @description `FileSystem` is an abstract file system API,
-#' `LocalFileSystem` is an implementation accessing files
-#' on the local machine. `SubTreeFileSystem` is an implementation that delegates
-#' to another implementation after prepending a fixed base path.
-#'
-#' @section Factory:
-#'
-#' `LocalFileSystem$create()` returns the object and takes no arguments.
-#'
-#' `SubTreeFileSystem$create()` takes the following arguments:
-#'
-#' - `base_path`, a string path
-#' - `base_fs`, a `FileSystem` object
-#'
-#' `S3FileSystem$create()` optionally takes arguments:
-#'
-#' - `anonymous`: logical, default `FALSE`. If `TRUE`, will not attempt to look up
-#'    credentials using standard AWS configuration methods.
-#' - `access_key`, `secret_key`: authentication credentials. If one is provided,
-#'    the other must be as well. If both are provided, they will override any
-#'    AWS configuration set at the environment level.
-#' - `session_token`: optional string for authentication along with
-#'    `access_key` and `secret_key`
-#' - `role_arn`: string AWS ARN of an AccessRole. If provided instead of `access_key` and
-#'    `secret_key`, temporary credentials will be fetched by assuming this role.
-#' - `session_name`: optional string identifier for the assumed role session.
-#' - `external_id`: optional unique string identifier that might be required
-#'    when you assume a role in another account.
-#' - `load_frequency`: integer, frequency (in seconds) with which temporary
-#'    credentials from an assumed role session will be refreshed. Default is
-#'    900 (i.e. 15 minutes)
-#' - `region`: AWS region to connect to. If omitted, the AWS library will
-#'    provide a sensible default based on client configuration, falling back
-#'    to "us-east-1" if no other alternatives are found.
-#' - `endpoint_override`: If non-empty, override region with a connect string
-#'    such as "localhost:9000". This is useful for connecting to file systems
-#'    that emulate S3.
-#' - `scheme`: S3 connection transport (default "https")
-#' - `background_writes`: logical, whether `OutputStream` writes will be issued
-#'    in the background, without blocking (default `TRUE`)
-#'
-#' @section Methods:
-#'
-#' - `$GetFileInfo(x)`: `x` may be a [FileSelector][FileSelector] or a character
-#'    vector of paths. Returns a list of [FileInfo][FileInfo]
-#' - `$CreateDir(path, recursive = TRUE)`: Create a directory and subdirectories.
-#' - `$DeleteDir(path)`: Delete a directory and its contents, recursively.
-#' - `$DeleteDirContents(path)`: Delete a directory's contents, recursively.
-#'    Like `$DeleteDir()`,
-#'    but doesn't delete the directory itself. Passing an empty path (`""`) will
-#'    wipe the entire filesystem tree.
-#' - `$DeleteFile(path)` : Delete a file.
-#' - `$DeleteFiles(paths)` : Delete many files. The default implementation
-#'    issues individual delete operations in sequence.
-#' - `$Move(src, dest)`: Move / rename a file or directory. If the destination
-#'    exists:
-#'      - if it is a non-empty directory, an error is returned
-#'      - otherwise, if it has the same type as the source, it is replaced
-#'      - otherwise, behavior is unspecified (implementation-dependent).
-#' - `$CopyFile(src, dest)`: Copy a file. If the destination exists and is a
-#'    directory, an error is returned. Otherwise, it is replaced.
-#' - `$OpenInputStream(path)`: Open an [input stream][InputStream] for
-#'    sequential reading.
-#' - `$OpenInputFile(path)`: Open an [input file][RandomAccessFile] for random
-#'    access reading.
-#' - `$OpenOutputStream(path)`: Open an [output stream][OutputStream] for
-#'    sequential writing.
-#' - `$OpenAppendStream(path)`: Open an [output stream][OutputStream] for
-#'    appending.
-#'
-#' @section Active bindings:
-#'
-#' - `$type_name`: string filesystem type name, such as "local", "s3", etc.
-#' - `$region`: string AWS region, for `S3FileSystem` and `SubTreeFileSystem`
-#'    containing a `S3FileSystem`
-#' - `$base_fs`: for `SubTreeFileSystem`, the `FileSystem` it contains
-#' - `$base_path`: for `SubTreeFileSystem`, the path in `$base_fs` which is considered
-#'    root in this `SubTreeFileSystem`.
-#'
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @rdname FileSystem
-#' @name FileSystem
-#' @export
-FileSystem <- R6Class("FileSystem", inherit = ArrowObject,
-  public = list(
-    GetFileInfo = function(x) {
-      if (inherits(x, "FileSelector")) {
-        fs___FileSystem__GetTargetInfos_FileSelector(self, x)
-    } else if (is.character(x)) {
-        fs___FileSystem__GetTargetInfos_Paths(self, clean_path_rel(x))
-      } else {
-        abort("incompatible type for FileSystem$GetFileInfo()")
-      }
-    },
-
-    CreateDir = function(path, recursive = TRUE) {
-      fs___FileSystem__CreateDir(self, clean_path_rel(path), isTRUE(recursive))
-    },
-
-    DeleteDir = function(path) {
-      fs___FileSystem__DeleteDir(self, clean_path_rel(path))
-    },
-
-    DeleteDirContents = function(path) {
-      fs___FileSystem__DeleteDirContents(self, clean_path_rel(path))
-    },
-
-    DeleteFile = function(path) {
-      fs___FileSystem__DeleteFile(self, clean_path_rel(path))
-    },
-
-    DeleteFiles = function(paths) {
-      fs___FileSystem__DeleteFiles(self, clean_path_rel(paths))
-    },
-
-    Move = function(src, dest) {
-      fs___FileSystem__Move(self, clean_path_rel(src), clean_path_rel(dest))
-    },
-
-    CopyFile = function(src, dest) {
-      fs___FileSystem__CopyFile(self, clean_path_rel(src), clean_path_rel(dest))
-    },
-
-    OpenInputStream = function(path) {
-      fs___FileSystem__OpenInputStream(self, clean_path_rel(path))
-    },
-    OpenInputFile = function(path) {
-      fs___FileSystem__OpenInputFile(self, clean_path_rel(path))
-    },
-    OpenOutputStream = function(path) {
-      fs___FileSystem__OpenOutputStream(self, clean_path_rel(path))
-    },
-    OpenAppendStream = function(path) {
-      fs___FileSystem__OpenAppendStream(self, clean_path_rel(path))
-    },
-
-    # Friendlier R user interface
-    path = function(x) SubTreeFileSystem$create(x, self),
-    cd = function(x) SubTreeFileSystem$create(x, self),
-    ls = function(path = "", ...) {
-      selector <- FileSelector$create(path, ...) # ... for recursive = TRUE
-      infos <- self$GetFileInfo(selector)
-      map_chr(infos, ~.$path)
-      # TODO: add full.names argument like base::dir() (default right now is TRUE)
-      # TODO: see fs package for glob/regexp filtering
-      # TODO: verbose method that shows other attributes as df
-      # TODO: print methods for FileInfo, SubTreeFileSystem, S3FileSystem
-    }
-  ),
-  active = list(
-    type_name = function() fs___FileSystem__type_name(self)
-  )
-)
-FileSystem$from_uri <- function(uri) {
-  assert_that(is.string(uri))
-  fs___FileSystemFromUri(uri)
-}
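A local-filesystem sketch of the API above (LocalFileSystem and FileSelector
are defined in this same file):

    fs <- LocalFileSystem$create()
    td <- tempfile()
    fs$CreateDir(td)
    fs$GetFileInfo(FileSelector$create(td))  # list of FileInfo (empty here)
    fs$ls(td)                                # friendlier wrapper over the same call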
-
-get_paths_and_filesystem <- function(x, filesystem = NULL) {
-  # Wrapper around FileSystem$from_uri that handles local paths
-  # and an optional explicit filesystem
-  if (inherits(x, "SubTreeFileSystem")) {
-    return(list(fs = x$base_fs, path = x$base_path))
-  }
-  assert_that(is.character(x))
-  are_urls <- are_urls(x)
-  if (any(are_urls)) {
-    if (!all(are_urls)) {
-      stop("Vectors of mixed paths and URIs are not supported", call. = FALSE)
-    }
-    if (!is.null(filesystem)) {
-      # TODO: should this stop()? A URI already determines the filesystem,
-      # so passing a separate one here is ambiguous
-    }
-    x <- lapply(x, FileSystem$from_uri)
-    if (length(unique(map(x, ~class(.$fs)))) > 1) {
-      stop(
-        "Vectors of URIs for different file systems are not supported",
-        call. = FALSE
-      )
-    }
-    fs <- x[[1]]$fs
-    path <- map_chr(x, ~.$path) # singular name "path" used for compatibility
-  } else {
-    fs <- filesystem %||% LocalFileSystem$create()
-    if (inherits(fs, "LocalFileSystem")) {
-      path <- clean_path_abs(x)
-    } else {
-      path <- clean_path_rel(x)
-    }
-  }
-  list(
-    fs = fs,
-    path = path
-  )
-}
-
-# variant of the above function that asserts that x is either a scalar string
-# or a SubTreeFileSystem
-get_path_and_filesystem <- function(x, filesystem = NULL) {
-  assert_that(is.string(x) || inherits(x, "SubTreeFileSystem"))
-  get_paths_and_filesystem(x, filesystem)
-}
-
-is_url <- function(x) is.string(x) && grepl("://", x)
-are_urls <- function(x) if (!is.character(x)) FALSE else grepl("://", x)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FileSystem
-#' @export
-LocalFileSystem <- R6Class("LocalFileSystem", inherit = FileSystem)
-LocalFileSystem$create <- function() {
-  fs___LocalFileSystem__create()
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FileSystem
-#' @importFrom utils modifyList
-#' @export
-S3FileSystem <- R6Class("S3FileSystem", inherit = FileSystem,
-  active = list(
-    region = function() fs___S3FileSystem__region(self)
-  )
-)
-S3FileSystem$create <- function(anonymous = FALSE, ...) {
-  args <- list2(...)
-  if (anonymous) {
-    invalid_args <- intersect(c("access_key", "secret_key", "session_token", "role_arn", "session_name", "external_id", "load_frequency"), names(args))
-    if (length(invalid_args)) {
-      stop("Cannot specify ", oxford_paste(invalid_args), " when anonymous = TRUE", call. = FALSE)
-    }
-  } else {
-    keys_present <- length(intersect(c("access_key", "secret_key"), names(args)))
-    if (keys_present == 1) {
-      stop("Key authentication requires both access_key and secret_key", call. = FALSE)
-    }
-    if ("session_token" %in% names(args) && keys_present != 2) {
-      stop(
-        "In order to initialize a session with temporary credentials, ",
-        "both secret_key and access_key must be provided ",
-        "in addition to session_token.",
-        call. = FALSE
-      )
-    }
-    arn <- "role_arn" %in% names(args)
-    if (keys_present == 2 && arn) {
-      stop("Cannot provide both key authentication and role_arn", call. = FALSE)
-    }
-    arn_extras <- intersect(c("session_name", "external_id", "load_frequency"), names(args))
-    if (length(arn_extras) > 0 && !arn) {
-      stop("Cannot specify ", oxford_paste(arn_extras), " without providing a role_arn string", call. = FALSE)
-    }
-  }
-  args <- c(modifyList(default_s3_options, args), anonymous = anonymous)
-  exec(fs___S3FileSystem__create, !!!args)
-}
-
-default_s3_options <- list(
-  access_key = "",
-  secret_key = "",
-  session_token = "",
-  role_arn = "",
-  session_name = "",
-  external_id = "",
-  load_frequency = 900L,
-  region = "",
-  endpoint_override = "",
-  scheme = "",
-  background_writes = TRUE
-)
-
-#' Connect to an AWS S3 bucket
-#'
-#' `s3_bucket()` is a convenience function to create an `S3FileSystem` object
-#' that automatically detects the bucket's AWS region and holds onto its
-#' relative path.
-#'
-#' @param bucket string S3 bucket name or path
-#' @param ... Additional connection options, passed to `S3FileSystem$create()`
-#' @return A `SubTreeFileSystem` containing an `S3FileSystem` and the bucket's
-#' relative path. Note that this function's success does not guarantee that you
-#' are authorized to access the bucket's contents.
-#' @examples
-#' if (arrow_with_s3()) {
-#'   bucket <- s3_bucket("ursa-labs-taxi-data")
-#' }
-#' @export
-s3_bucket <- function(bucket, ...) {
-  assert_that(is.string(bucket))
-  args <- list2(...)
-
-  # Use FileSystemFromUri to detect the bucket's region
-  if (!is_url(bucket)) {
-    bucket <- paste0("s3://", bucket)
-  }
-  fs_and_path <- FileSystem$from_uri(bucket)
-  fs <- fs_and_path$fs
-  # If there are no additional S3Options, we can use that filesystem
-  # Otherwise, take the region that was detected and make a new fs with the args
-  if (length(args)) {
-    args$region <- fs$region
-    fs <- exec(S3FileSystem$create, !!!args)
-  }
-  # Return a subtree pointing at that bucket path
-  SubTreeFileSystem$create(fs_and_path$path, fs)
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname FileSystem
-#' @export
-SubTreeFileSystem <- R6Class("SubTreeFileSystem", inherit = FileSystem,
-  public = list(
-    print = function(...) {
-      if (inherits(self$base_fs, "LocalFileSystem")) {
-        cat("SubTreeFileSystem: ", "file://", self$base_path, "\n", sep = "")
-      } else if (inherits(self$base_fs, "S3FileSystem")) {
-        cat("SubTreeFileSystem: ", "s3://", self$base_path, "\n", sep = "")
-      } else {
-        cat("SubTreeFileSystem", "\n", sep = "")
-      }
-      invisible(self)
-    }
-  ),
-  active = list(
-    base_fs = function() {
-      fs___SubTreeFileSystem__base_fs(self)
-    },
-    base_path = function() fs___SubTreeFileSystem__base_path(self)
-  )
-)
-SubTreeFileSystem$create <- function(base_path, base_fs = NULL) {
-  fs_and_path <- get_path_and_filesystem(base_path, base_fs)
-  fs___SubTreeFileSystem__create(fs_and_path$path, fs_and_path$fs)
-}
-
-#' @export
-`$.SubTreeFileSystem` <- function(x, name, ...) {
-  # This is to allow delegating methods/properties to the base_fs
-  assert_that(is.string(name))
-  if (name %in% ls(envir = x)) {
-    get(name, x)
-  } else if (name %in% ls(envir = x$base_fs)) {
-    get(name, x$base_fs)
-  } else {
-    NULL
-  }
-}
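A sketch of the delegation this method enables; the bucket name is
hypothetical, and resolving it requires an S3-enabled build plus network
access:

    b <- s3_bucket("some-bucket")   # hypothetical bucket; returns a SubTreeFileSystem
    b$base_path                     # defined on the SubTreeFileSystem itself
    b$region                        # not found there, so looked up on b$base_fs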
-
-#' Copy files between FileSystems
-#'
-#' @param from A string path to a local directory or file, a URI, or a
-#' `SubTreeFileSystem`. Files will be copied recursively from this path.
-#' @param to A string path to a local directory or file, a URI, or a
-#' `SubTreeFileSystem`. Directories will be created as necessary
-#' @param chunk_size The maximum size of block to read before flushing
-#' to the destination file. A larger chunk_size will use more memory while
-#' copying but may help accommodate high latency FileSystems.
-#' @return Nothing: called for side effects in the file system
-#' @export
-#' @examples
-#' \dontrun{
-#' # Copy an S3 bucket's files to a local directory:
-#' copy_files("s3://your-bucket-name", "local-directory")
-#' # Using a FileSystem object
-#' copy_files(s3_bucket("your-bucket-name"), "local-directory")
-#' # Or go the other way, from local to S3
-#' copy_files("local-directory", s3_bucket("your-bucket-name"))
-#' }
-copy_files <- function(from, to, chunk_size = 1024L * 1024L) {
-  from <- get_path_and_filesystem(from)
-  to <- get_path_and_filesystem(to)
-  invisible(fs___CopyFiles(
-    from$fs,
-    FileSelector$create(from$path, recursive = TRUE),
-    to$fs,
-    to$path,
-    chunk_size,
-    option_use_threads()
-  ))
-}
-
-clean_path_abs <- function(path) {
-  # Make sure we have a valid, absolute, forward-slashed path for passing to Arrow
-  normalizePath(path, winslash = "/", mustWork = FALSE)
-}
-
-clean_path_rel <- function(path) {
-  # Make sure all path separators are "/", not "\" as on Windows
-  path_sep <- ifelse(tolower(Sys.info()[["sysname"]]) == "windows", "\\\\", "/")
-  gsub(path_sep, "/", path)
-}
diff --git a/r/R/flight.R b/r/R/flight.R
deleted file mode 100644
index 486c59a..0000000
--- a/r/R/flight.R
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Load a Python Flight server
-#'
-#' @param name string Python module name
-#' @param path file system path where the Python module is found. Default is
-#' to look in the `inst/` directory for included modules.
-#' @export
-load_flight_server <- function(name, path = system.file(package = "arrow")) {
-  reticulate::import_from_path(name, path)
-}
-
-#' Connect to a Flight server
-#'
-#' @param host string hostname to connect to
-#' @param port integer port to connect on
-#' @param scheme URL scheme, default is "grpc+tcp"
-#' @return A `pyarrow.flight.FlightClient`.
-#' @export
-flight_connect <- function(host = "localhost", port, scheme = "grpc+tcp") {
-  pa <- reticulate::import("pyarrow")
-  location <- paste0(scheme, "://", host, ":", port)
-  pa$flight$FlightClient(location)
-}
-
-#' Send data to a Flight server
-#'
-#' @param client `pyarrow.flight.FlightClient`, as returned by [flight_connect()]
-#' @param data `data.frame`, [RecordBatch], or [Table] to upload
-#' @param path string identifier to store the data under
-#' @param overwrite logical: if `path` exists on `client` already, should we
-#' replace it with the contents of `data`? Default is `TRUE`; if `FALSE` and
-#' `path` exists, the function will error.
-#' @return `client`, invisibly.
-#' @export
-flight_put <- function(client, data, path, overwrite = TRUE) {
-  if (!overwrite && flight_path_exists(client, path)) {
-    stop(path, " exists.", call. = FALSE)
-  }
-  if (is.data.frame(data)) {
-    data <- Table$create(data)
-  }
-  py_data <- reticulate::r_to_py(data)
-  writer <- client$do_put(descriptor_for_path(path), py_data$schema)[[1]]
-  if (inherits(data, "RecordBatch")) {
-    writer$write_batch(py_data)
-  } else {
-    writer$write_table(py_data)
-  }
-  writer$close()
-  invisible(client)
-}
-
-#' Get data from a Flight server
-#'
-#' @param client `pyarrow.flight.FlightClient`, as returned by [flight_connect()]
-#' @param path string identifier under which data is stored
-#' @return A [Table]
-#' @export
-flight_get <- function(client, path) {
-  reader <- flight_reader(client, path)
-  reader$read_all()
-}
-
-# TODO: could use this as a RecordBatch iterator, call $read_chunk() on this
-flight_reader <- function(client, path) {
-  info <- client$get_flight_info(descriptor_for_path(path))
-  # Hack: assume a single ticket, on the same server as client is already connected
-  ticket <- info$endpoints[[1]]$ticket
-  client$do_get(ticket)
-}
-
-descriptor_for_path <- function(path) {
-  pa <- reticulate::import("pyarrow")
-  pa$flight$FlightDescriptor$for_path(path)
-}
-
-#' See available resources on a Flight server
-#'
-#' @inheritParams flight_get
-#' @return `list_flights()` returns a character vector of paths.
-#' `flight_path_exists()` returns a logical value, the equivalent of `path %in% list_flights()`
-#' @export
-list_flights <- function(client) {
-  generator <- client$list_flights()
-  out <- reticulate::iterate(generator, function(x) as.character(x$descriptor$path[[1]]))
-  out
-}
-
-#' @rdname list_flights
-#' @export
-flight_path_exists <- function(client, path) {
-  it_exists <- tryCatch({
-      client$get_flight_info(descriptor_for_path(path))
-      TRUE
-    },
-    error = function(e) {
-      msg <- conditionMessage(e)
-      if (!any(grepl("ArrowKeyError", msg))) {
-        # Raise an error if this fails for any reason other than not found
-        stop(e)
-      }
-      FALSE
-    }
-  )
-  it_exists
-}
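A round-trip sketch of the Flight helpers above; the port is an assumption,
and a server (e.g. one loaded via load_flight_server()) must already be
listening, with reticulate and pyarrow available:

    client <- flight_connect(port = 8089)        # assumed local demo server
    flight_put(client, mtcars, path = "demo/mtcars")
    flight_path_exists(client, "demo/mtcars")    # TRUE
    tab <- flight_get(client, "demo/mtcars")     # an Arrow Table
    list_flights(client)                         # includes "demo/mtcars"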
diff --git a/r/R/install-arrow.R b/r/R/install-arrow.R
deleted file mode 100644
index 63db8ed..0000000
--- a/r/R/install-arrow.R
+++ /dev/null
@@ -1,139 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Install or upgrade the Arrow library
-#'
-#' Use this function to install the latest release of `arrow`, to switch to or
-#' from a nightly development version, or on Linux to try reinstalling with
-#' all necessary C++ dependencies.
-#'
-#' Note that, unlike packages like `tensorflow`, `blogdown`, and others that
-#' require external dependencies, you do not need to run `install_arrow()`
-#' after a successful `arrow` installation.
-#'
-#' @param nightly logical: Should we install a development version of the
-#' package, or should we install from CRAN (the default)?
-#' @param binary On Linux, value to set for the environment variable
-#' `LIBARROW_BINARY`, which governs how C++ binaries are used, if at all.
-#' The default value, `TRUE`, tells the installation script to detect the
-#' Linux distribution and version and find an appropriate C++ library. `FALSE`
-#' would tell the script not to retrieve a binary and instead build Arrow C++
-#' from source. Other valid values are strings corresponding to a Linux
-#' distribution-version, to override the value that would be detected.
-#' See `vignette("install", package = "arrow")` for further details.
-#' @param use_system logical: Should we use `pkg-config` to look for Arrow
-#' system packages? Default is `FALSE`. If `TRUE`, source installation may be
-#' faster, but there is a risk of version mismatch. This sets the
-#' `ARROW_USE_PKG_CONFIG` environment variable.
-#' @param minimal logical: If building from source, should we build without
-#' optional dependencies (compression libraries, for example)? Default is
-#' `FALSE`. This sets the `LIBARROW_MINIMAL` environment variable.
-#' @param verbose logical: Print more debugging output when installing? Default
-#' is `FALSE`. This sets the `ARROW_R_DEV` environment variable.
-#' @param repos character vector of base URLs of the repositories to install
-#' from (passed to `install.packages()`)
-#' @param ... Additional arguments passed to `install.packages()`
-#' @export
-#' @importFrom utils install.packages
-#' @seealso [arrow_available()] to see if the package was configured with
-#' necessary C++ dependencies. `vignette("install", package = "arrow")` for
-#' more ways to tune installation on Linux.
-install_arrow <- function(nightly = FALSE,
-                          binary = Sys.getenv("LIBARROW_BINARY", TRUE),
-                          use_system = Sys.getenv("ARROW_USE_PKG_CONFIG", FALSE),
-                          minimal = Sys.getenv("LIBARROW_MINIMAL", FALSE),
-                          verbose = Sys.getenv("ARROW_R_DEV", FALSE),
-                          repos = getOption("repos"),
-                          ...) {
-  sysname <- tolower(Sys.info()[["sysname"]])
-  conda <- isTRUE(grepl("conda", R.Version()$platform))
-
-  if (conda) {
-    if (nightly) {
-      system("conda install -y -c arrow-nightlies -c conda-forge --strict-channel-priority r-arrow")
-    } else {
-      system("conda install -y -c conda-forge --strict-channel-priority r-arrow")
-    }
-  } else {
-    Sys.setenv(
-      LIBARROW_DOWNLOAD = "true",
-      LIBARROW_BINARY = binary,
-      LIBARROW_MINIMAL = minimal,
-      ARROW_R_DEV = verbose,
-      ARROW_USE_PKG_CONFIG = use_system
-    )
-    # On the M1, we can't use the usual autobrew, which pulls Intel dependencies
-    apple_m1 <- grepl("arm-apple|aarch64.*darwin", R.Version()$platform)
-    # On Rosetta, we have to build without JEMALLOC, so we also can't autobrew
-    rosetta <- identical(sysname, "darwin") && identical(system("sysctl -n sysctl.proc_translated", intern = TRUE), "1")
-    if (rosetta) {
-      Sys.setenv(ARROW_JEMALLOC = "OFF")
-    }
-    if (apple_m1 || rosetta) {
-      Sys.setenv(FORCE_BUNDLED_BUILD = "true")
-    }
-
-    opts <- list()
-    if (apple_m1 || rosetta) {
-      # Skip binaries (esp. for rosetta)
-      opts$pkgType <- "source"
-    } else if (isTRUE(binary)) {
-      # Unless otherwise directed, don't consider newer source packages when
-      # options(pkgType) == "both" (default on win/mac)
-      opts$install.packages.check.source <- "no"
-      opts$install.packages.compile.from.source <- "never"
-    }
-    if (length(opts)) {
-      old <- options(opts)
-      on.exit(options(old))
-    }
-    install.packages("arrow", repos = arrow_repos(repos, nightly), ...)
-  }
-  if ("arrow" %in% loadedNamespaces()) {
-    # If you've just sourced this file, "arrow" won't be (re)loaded
-    reload_arrow()
-  }
-}
-
-arrow_repos <- function(repos = getOption("repos"), nightly = FALSE) {
-  if (length(repos) == 0 || identical(repos, c(CRAN = "@CRAN@"))) {
-    # Set the default/CDN
-    repos <- "https://cloud.r-project.org/"
-  }
-  dev_repo <- getOption("arrow.dev_repo", "https://arrow-r-nightly.s3.amazonaws.com")
-  # Remove it if it's there (so nightly=FALSE won't accidentally pull from it)
-  repos <- setdiff(repos, dev_repo)
-  if (nightly) {
-    # Add it first
-    repos <- c(dev_repo, repos)
-  }
-  repos
-}
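The ordering arrow_repos() produces, shown on a plain CRAN mirror:

    arrow_repos("https://cloud.r-project.org/")
    # [1] "https://cloud.r-project.org/"
    arrow_repos("https://cloud.r-project.org/", nightly = TRUE)
    # [1] "https://arrow-r-nightly.s3.amazonaws.com" "https://cloud.r-project.org/"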
-
-reload_arrow <- function() {
-  if (requireNamespace("pkgload", quietly = TRUE)) {
-    is_attached <- "package:arrow" %in% search()
-    pkgload::unload("arrow")
-    if (is_attached) {
-      require("arrow", character.only = TRUE, quietly = TRUE)
-    } else {
-      requireNamespace("arrow", quietly = TRUE)
-    }
-  } else {
-    message("Please restart R to use the 'arrow' package.")
-  }
-}
diff --git a/r/R/io.R b/r/R/io.R
deleted file mode 100644
index 5f015ce..0000000
--- a/r/R/io.R
+++ /dev/null
@@ -1,290 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-package.R
-#' @include enums.R
-#' @include buffer.R
-
-# OutputStream ------------------------------------------------------------
-
-Writable <- R6Class("Writable", inherit = ArrowObject,
-  public = list(
-    write = function(x) io___Writable__write(self, buffer(x))
-  )
-)
-
-#' @title OutputStream classes
-#' @description `FileOutputStream` is for writing to a file;
-#' `BufferOutputStream` writes to a buffer.
-#' You can create one and pass it to any of the table writers, for example.
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @section Factory:
-#'
-#' The `$create()` factory methods instantiate the `OutputStream` object and
-#' take the following arguments, depending on the subclass:
-#'
-#' - `path` For `FileOutputStream`, a character file name
-#' - `initial_capacity` For `BufferOutputStream`, the size in bytes of the
-#'    buffer.
-#'
-#' @section Methods:
-#'
-#'  - `$tell()`: return the position in the stream
-#'  - `$close()`: close the stream
-#'  - `$write(x)`: send `x` to the stream
-#'  - `$capacity()`: for `BufferOutputStream`
-#'  - `$finish()`: for `BufferOutputStream`
-#'  - `$GetExtentBytesWritten()`: for `MockOutputStream`, report how many bytes
-#'    were sent.
-#'
-#' @rdname OutputStream
-#' @name OutputStream
-OutputStream <- R6Class("OutputStream", inherit = Writable,
-  public = list(
-    close = function() io___OutputStream__Close(self),
-    tell = function() io___OutputStream__Tell(self)
-  )
-)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname OutputStream
-#' @export
-FileOutputStream <- R6Class("FileOutputStream", inherit = OutputStream)
-FileOutputStream$create <- function(path) {
-  io___FileOutputStream__Open(clean_path_abs(path))
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname OutputStream
-#' @export
-BufferOutputStream <- R6Class("BufferOutputStream", inherit = OutputStream,
-  public = list(
-    capacity = function() io___BufferOutputStream__capacity(self),
-    finish = function() io___BufferOutputStream__Finish(self),
-    write = function(bytes) io___BufferOutputStream__Write(self, bytes),
-    tell = function() io___BufferOutputStream__Tell(self)
-  )
-)
-BufferOutputStream$create <- function(initial_capacity = 0L) {
-  io___BufferOutputStream__Create(initial_capacity)
-}
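A sketch of writing into memory with the class above:

    stream <- BufferOutputStream$create()
    stream$write(as.raw(1:10))   # this subclass's write() takes raw bytes
    stream$tell()                # 10
    buf <- stream$finish()       # a Buffer holding those ten bytes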
-
-# InputStream -------------------------------------------------------------
-
-
-Readable <- R6Class("Readable", inherit = ArrowObject,
-  public = list(
-    Read = function(nbytes) io___Readable__Read(self, nbytes)
-  )
-)
-
-#' @title InputStream classes
-#' @description `RandomAccessFile` inherits from `InputStream` and is a base
-#' class for: `ReadableFile` for reading from a file; `MemoryMappedFile` for
-#' the same but with memory mapping; and `BufferReader` for reading from a
-#' buffer. Use these with the various table readers.
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @section Factory:
-#'
-#' The `$create()` factory methods instantiate the `InputStream` object and
-#' take the following arguments, depending on the subclass:
-#'
-#' - `path` For `ReadableFile`, a character file name
-#' - `x` For `BufferReader`, a [Buffer] or an object that can be
-#'    made into a buffer via `buffer()`.
-#'
-#' To instantiate a `MemoryMappedFile`, call [mmap_open()].
-#'
-#' @section Methods:
-#'
-#'  - `$GetSize()`: return the size of the file in bytes
-#'  - `$supports_zero_copy()`: Logical
-#'  - `$seek(position)`: go to that position in the stream
-#'  - `$tell()`: return the position in the stream
-#'  - `$close()`: close the stream
-#'  - `$Read(nbytes)`: read data from the stream, either a specified `nbytes`
-#'    or, if `nbytes` is not provided, all remaining bytes
-#'  - `$ReadAt(position, nbytes)`: similar to `$seek(position)$Read(nbytes)`
-#'  - `$Resize(size)`: for a `MemoryMappedFile` that is writeable
-#'
-#' @rdname InputStream
-#' @name InputStream
-InputStream <- R6Class("InputStream", inherit = Readable,
-  public = list(
-    close = function() io___InputStream__Close(self)
-  )
-)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname InputStream
-#' @export
-RandomAccessFile <- R6Class("RandomAccessFile", inherit = InputStream,
-  public = list(
-    GetSize = function() io___RandomAccessFile__GetSize(self),
-    supports_zero_copy = function() io___RandomAccessFile__supports_zero_copy(self),
-    seek = function(position) io___RandomAccessFile__Seek(self, position),
-    tell = function() io___RandomAccessFile__Tell(self),
-
-    Read = function(nbytes = NULL) {
-      if (is.null(nbytes)) {
-        io___RandomAccessFile__Read0(self)
-      } else {
-        io___Readable__Read(self, nbytes)
-      }
-    },
-
-    ReadAt = function(position, nbytes = NULL) {
-      if (is.null(nbytes)) {
-        nbytes <- self$GetSize() - position
-      }
-      io___RandomAccessFile__ReadAt(self, position, nbytes)
-    }
-  )
-)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname InputStream
-#' @export
-MemoryMappedFile <- R6Class("MemoryMappedFile", inherit = RandomAccessFile,
-  public = list(
-    Resize = function(size) io___MemoryMappedFile__Resize(self, size)
-  )
-)
-
-#' @usage NULL
-#' @format NULL
-#' @rdname InputStream
-#' @export
-ReadableFile <- R6Class("ReadableFile", inherit = RandomAccessFile)
-ReadableFile$create <- function(path) {
-  io___ReadableFile__Open(clean_path_abs(path))
-}
-
-#' @usage NULL
-#' @format NULL
-#' @rdname InputStream
-#' @export
-BufferReader <- R6Class("BufferReader", inherit = RandomAccessFile)
-BufferReader$create <- function(x) {
-  x <- buffer(x)
-  io___BufferReader__initialize(x)
-}
-
-#' Create a new read/write memory mapped file of a given size
-#'
-#' @param path file path
-#' @param size size in bytes
-#'
-#' @return an [arrow::io::MemoryMappedFile][MemoryMappedFile]
-#'
-#' @export
-mmap_create <- function(path, size) {
-  path <- clean_path_abs(path)
-  io___MemoryMappedFile__Create(path, size)
-}
-
-#' Open a memory mapped file
-#'
-#' @param path file path
-#' @param mode file mode (read/write/readwrite)
-#'
-#' @export
-mmap_open <- function(path, mode = c("read", "write", "readwrite")) {
-  mode <- match(match.arg(mode), c("read", "write", "readwrite")) - 1L
-  path <- clean_path_abs(path)
-  io___MemoryMappedFile__Open(path, mode)
-}
-
-#' Handle a range of possible input sources
-#' @param file A character file name, `raw` vector, or an Arrow input stream
-#' @param mmap Logical: whether to memory-map the file (default `TRUE`)
-#' @param compression If the file is compressed, create a [CompressedInputStream]
-#' with this compression codec, either a [Codec] or the string name of one.
-#' If `NULL` (default) and `file` is a string file name, the function will try
-#' to infer compression from the file extension.
-#' @param filesystem If not `NULL`, `file` will be opened via the
-#' `filesystem$OpenInputFile()` filesystem method, rather than the `io` module's
-#' `MemoryMappedFile` or `ReadableFile` constructors.
-#' @return An `InputStream` or a subclass of one.
-#' @keywords internal
-make_readable_file <- function(file, mmap = TRUE, compression = NULL, filesystem = NULL) {
-  if (inherits(file, "SubTreeFileSystem")) {
-    filesystem <- file$base_fs
-    file <- file$base_path
-  }
-  if (is.string(file)) {
-    if (is_url(file)) {
-      fs_and_path <- FileSystem$from_uri(file)
-      filesystem <- fs_and_path$fs
-      file <- fs_and_path$path
-    }
-    if (is.null(compression)) {
-      # Infer compression from the file path
-      compression <- detect_compression(file)
-    }
-    if (!is.null(filesystem)) {
-      file <- filesystem$OpenInputFile(file)
-    } else if (isTRUE(mmap)) {
-      file <- mmap_open(file)
-    } else {
-      file <- ReadableFile$create(file)
-    }
-    if (!identical(compression, "uncompressed")) {
-      file <- CompressedInputStream$create(file, compression)
-    }
-  } else if (inherits(file, c("raw", "Buffer"))) {
-    file <- BufferReader$create(file)
-  }
-  assert_is(file, "InputStream")
-  file
-}
-
-make_output_stream <- function(x, filesystem = NULL) {
-  if (inherits(x, "SubTreeFileSystem")) {
-    filesystem <- x$base_fs
-    x <- x$base_path
-  } else if (is_url(x)) {
-    fs_and_path <- FileSystem$from_uri(x)
-    filesystem <- fs_and_path$fs
-    x <- fs_and_path$path
-  }
-  assert_that(is.string(x))
-  if (is.null(filesystem)) {
-    FileOutputStream$create(x)
-  } else {
-    filesystem$OpenOutputStream(x)
-  }
-}
-
-detect_compression <- function(path) {
-  assert_that(is.string(path))
-  switch(tools::file_ext(path),
-    bz2 = "bz2",
-    gz = "gzip",
-    lz4 = "lz4",
-    zst = "zstd",
-    "uncompressed"
-  )
-}
diff --git a/r/R/ipc_stream.R b/r/R/ipc_stream.R
deleted file mode 100644
index 4f506f3..0000000
--- a/r/R/ipc_stream.R
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Write Arrow IPC stream format
-#'
-#' Apache Arrow defines two formats for [serializing data for interprocess
-#' communication (IPC)](https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc):
-#' a "stream" format and a "file" format, known as Feather. `write_ipc_stream()`
-#' and [write_feather()] write those formats, respectively.
-#'
-#' `write_arrow()`, a wrapper around `write_ipc_stream()` and `write_feather()`
-#' with some nonstandard behavior, is deprecated. You should explicitly choose
-#' the function that will write the desired IPC format (stream or file) since
-#' either can be written to a file or `OutputStream`.
-#'
-#' @inheritParams write_feather
-#' @param ... extra parameters passed to `write_feather()`.
-#'
-#' @return `x`, invisibly.
-#' @seealso [write_feather()] for writing IPC files. [write_to_raw()] to
-#' serialize data to a buffer.
-#' [RecordBatchWriter] for a lower-level interface.
-#' @export
-write_ipc_stream <- function(x, sink, ...) {
-  x_out <- x # So we can return the data we got
-  if (is.data.frame(x)) {
-    x <- Table$create(x)
-  }
-  if (!inherits(sink, "OutputStream")) {
-    sink <- make_output_stream(sink)
-    on.exit(sink$close())
-  }
-
-  writer <- RecordBatchStreamWriter$create(sink, x$schema)
-  writer$write(x)
-  writer$close()
-  invisible(x_out)
-}
-
-#' Write Arrow data to a raw vector
-#'
-#' [write_ipc_stream()] and [write_feather()] write data to a sink and return
-#' the data (`data.frame`, `RecordBatch`, or `Table`) they were given.
-#' This function wraps those so that you can serialize data to a buffer and
-#' access that buffer as a `raw` vector in R.
-#' @inheritParams write_feather
-#' @param format one of `c("stream", "file")`, indicating the IPC format to use
-#' @return A `raw` vector containing the bytes of the IPC serialized data.
-#' @export
-write_to_raw <- function(x, format = c("stream", "file")) {
-  sink <- BufferOutputStream$create()
-  if (match.arg(format) == "stream") {
-    write_ipc_stream(x, sink)
-  } else {
-    write_feather(x, sink)
-  }
-  as.raw(buffer(sink))
-}
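A quick round-trip through write_to_raw() and read_ipc_stream() (defined just
below):

    bytes <- write_to_raw(mtcars)    # IPC stream format by default
    df <- read_ipc_stream(bytes)     # raw input is wrapped in a BufferReader
    identical(dim(df), dim(mtcars))  # TRUE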
-
-#' Read Arrow IPC stream format
-#'
-#' Apache Arrow defines two formats for [serializing data for interprocess
-#' communication (IPC)](https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc):
-#' a "stream" format and a "file" format, known as Feather. `read_ipc_stream()`
-#' and [read_feather()] read those formats, respectively.
-#'
-#' `read_arrow()`, a wrapper around `read_ipc_stream()` and `read_feather()`,
-#' is deprecated. You should explicitly choose
-#' the function that will read the desired IPC format (stream or file) since
-#' a file or `InputStream` may contain either.
-#'
-#' @param file A character file name or URI, `raw` vector, an Arrow input stream,
-#' or a `FileSystem` with path (`SubTreeFileSystem`).
-#' If a file name or URI, an Arrow [InputStream] will be opened and
-#' closed when finished. If an input stream is provided, it will be left
-#' open.
-#' @param as_data_frame Should the function return a `data.frame` (default) or
-#' an Arrow [Table]?
-#' @param ... extra parameters passed to `read_feather()`.
-#'
-#' @return A `data.frame` if `as_data_frame` is `TRUE` (the default), or an
-#' Arrow [Table] otherwise
-#' @seealso [read_feather()] for reading IPC files. [RecordBatchReader] for a
-#' lower-level interface.
-#' @export
-read_ipc_stream <- function(file, as_data_frame = TRUE, ...) {
-  if (!inherits(file, "InputStream")) {
-    file <- make_readable_file(file)
-    on.exit(file$close())
-  }
-
-  # TODO: this could take col_select, like the other readers
-  # https://issues.apache.org/jira/browse/ARROW-6830
-  out <- RecordBatchStreamReader$create(file)$read_table()
-  if (as_data_frame) {
-    out <- as.data.frame(out)
-  }
-  out
-}
diff --git a/r/R/json.R b/r/R/json.R
deleted file mode 100644
index 89595a5..0000000
--- a/r/R/json.R
+++ /dev/null
@@ -1,104 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' Read a JSON file
-#'
-#' Using [JsonTableReader]
-#'
-#' @inheritParams read_delim_arrow
-#' @param schema [Schema] that describes the table.
-#' @param ... Additional options passed to `JsonTableReader$create()`
-#'
-#' @return A `data.frame`, or a Table if `as_data_frame = FALSE`.
-#' @export
-#' @examples
-#' \donttest{
-#'   tf <- tempfile()
-#'   on.exit(unlink(tf))
-#'   writeLines('
-#'     { "hello": 3.5, "world": false, "yo": "thing" }
-#'     { "hello": 3.25, "world": null }
-#'     { "hello": 0.0, "world": true, "yo": null }
-#'   ', tf, useBytes=TRUE)
-#'   df <- read_json_arrow(tf)
-#' }
-read_json_arrow <- function(file,
-                            col_select = NULL,
-                            as_data_frame = TRUE,
-                            schema = NULL,
-                            ...) {
-  if (!inherits(file, "InputStream")) {
-    file <- make_readable_file(file)
-    on.exit(file$close())
-  }
-  tab <- JsonTableReader$create(file, schema = schema, ...)$Read()
-
-  col_select <- enquo(col_select)
-  if (!quo_is_null(col_select)) {
-    tab <- tab[vars_select(names(tab), !!col_select)]
-  }
-
-  if (isTRUE(as_data_frame)) {
-    tab <- as.data.frame(tab)
-  }
-  tab
-}
-
-#' @include arrow-package.R
-#' @rdname CsvTableReader
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @export
-JsonTableReader <- R6Class("JsonTableReader", inherit = ArrowObject,
-  public = list(
-    Read = function() json___TableReader__Read(self)
-  )
-)
-JsonTableReader$create <- function(file,
-                                   read_options = JsonReadOptions$create(),
-                                   parse_options = JsonParseOptions$create(schema = schema),
-                                   schema = NULL,
-                                   ...) {
-  assert_is(file, "InputStream")
-  json___TableReader__Make(file, read_options, parse_options)
-}
-
-#' @rdname CsvReadOptions
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @export
-JsonReadOptions <- R6Class("JsonReadOptions", inherit = ArrowObject)
-JsonReadOptions$create <- function(use_threads = option_use_threads(), block_size = 1048576L) {
-  json___ReadOptions__initialize(use_threads, block_size)
-}
-
-#' @rdname CsvReadOptions
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#' @export
-JsonParseOptions <- R6Class("JsonParseOptions", inherit = ArrowObject)
-JsonParseOptions$create <- function(newlines_in_values = FALSE, schema = NULL) {
-  if (is.null(schema)) {
-    json___ParseOptions__initialize1(newlines_in_values)
-  } else {
-    json___ParseOptions__initialize2(newlines_in_values, schema)
-  }
-}
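A sketch of a schema-driven read that skips type inference; schema() and the
type constructors live elsewhere in the package:

    sch <- schema(hello = float64(), world = boolean(), yo = utf8())
    df <- read_json_arrow(tf, schema = sch)  # tf as in the roxygen example above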
diff --git a/r/R/memory-pool.R b/r/R/memory-pool.R
deleted file mode 100644
index 2207ed6..0000000
--- a/r/R/memory-pool.R
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-package.R
-#'
-#' @title class arrow::MemoryPool
-#'
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @section Methods:
-#'
-#' - `backend_name`: one of "jemalloc", "mimalloc", or "system". Alternative
-#'   memory allocators are optionally enabled at build time. Windows builds
-#'   generally have `mimalloc`, and most others have both `jemalloc` (used by
-#'   default) and `mimalloc`. To change memory allocators at runtime, set the
-#'   environment variable `ARROW_DEFAULT_MEMORY_POOL` to one of those strings
-#'   prior to loading the `arrow` library.
-#' - `bytes_allocated`
-#' - `max_memory`
-#'
-#' @rdname MemoryPool
-#' @name MemoryPool
-#' @keywords internal
-MemoryPool <- R6Class("MemoryPool",
-  inherit = ArrowObject,
-  public = list(
-    # TODO: Allocate
-    # TODO: Reallocate
-    # TODO: Free
-  ),
-  active = list(
-    backend_name = function() MemoryPool__backend_name(self),
-    bytes_allocated = function() MemoryPool__bytes_allocated(self),
-    max_memory = function() MemoryPool__max_memory(self)
-  )
-)
-
-#' Arrow's default [MemoryPool]
-#'
-#' @return the default [MemoryPool]
-#' @export
-#' @keywords internal
-default_memory_pool <- function() {
-  MemoryPool__default()
-}
diff --git a/r/R/message.R b/r/R/message.R
deleted file mode 100644
index 6a374a2..0000000
--- a/r/R/message.R
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @include arrow-package.R
-
-#' @title class arrow::Message
-#'
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @section Methods:
-#'
-#' TODO
-#'
-#' @rdname Message
-#' @name Message
-Message <- R6Class("Message", inherit = ArrowObject,
-  public = list(
-    Equals = function(other, ...) {
-      inherits(other, "Message") && ipc___Message__Equals(self, other)
-    },
-    body_length = function() ipc___Message__body_length(self),
-    Verify = function() ipc___Message__Verify(self)
-  ),
-  active = list(
-    type = function() ipc___Message__type(self),
-    metadata = function() ipc___Message__metadata(self),
-    body = function() ipc___Message__body(self)
-  )
-)
-
-#' @title class arrow::MessageReader
-#'
-#' @usage NULL
-#' @format NULL
-#' @docType class
-#'
-#' @section Methods:
-#'
-#' TODO
-#'
-#' @rdname MessageReader
-#' @name MessageReader
-#' @export
-MessageReader <- R6Class("MessageReader", inherit = ArrowObject,
-  public = list(
-    ReadNextMessage = function() ipc___MessageReader__ReadNextMessage(self)
-  )
-)
-
-MessageReader$create <- function(stream) {
-  if (!inherits(stream, "InputStream")) {
-    stream <- BufferReader$create(stream)
-  }
-  ipc___MessageReader__Open(stream)
-}
-
-#' Read a Message from a stream
-#'
-#' @param stream an InputStream
-#'
-#' @export
-read_message <- function(stream) {
-  UseMethod("read_message")
-}
-
-#' @export
-read_message.default <- function(stream) {
-  read_message(BufferReader$create(stream))
-}
-
-#' @export
-read_message.InputStream <- function(stream) {
-  ipc___ReadMessage(stream)
-}
-
-#' @export
-read_message.MessageReader <- function(stream) {
-  stream$ReadNextMessage()
-}
diff --git a/r/R/metadata.R b/r/R/metadata.R
deleted file mode 100644
index d3e5e21..0000000
--- a/r/R/metadata.R
+++ /dev/null
@@ -1,132 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#' @importFrom utils object.size
-.serialize_arrow_r_metadata <- function(x) {
-  assert_is(x, "list")
-
-  # drop problems attributes (most likely from readr)
-  x[["attributes"]][["problems"]] <- NULL
... 38468 lines suppressed ...

[arrow-rs] 14/14: Removed DataFusion and Ballista.

Posted by jo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jorgecarleitao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git

commit a889ebac9d616b50426a55210e12c21697e7c43a
Author: Jorge C. Leitao <jo...@gmail.com>
AuthorDate: Sun Apr 18 14:27:44 2021 +0000

    Removed DataFusion and Ballista.
---
 .dockerignore                                      |     3 -
 .github/workflows/dev_pr/labeler.yml               |     6 -
 rust/ballista/.dockerignore                        |    18 -
 rust/ballista/README.md                            |    64 -
 rust/ballista/dev/build-rust-base.sh               |    21 -
 rust/ballista/dev/build-rust.sh                    |    24 -
 rust/ballista/dev/integration-tests.sh             |    28 -
 rust/ballista/docker/README.md                     |    29 -
 rust/ballista/docker/rust-base.dockerfile          |    99 -
 rust/ballista/docker/rust.dockerfile               |    71 -
 rust/ballista/docs/README.md                       |    37 -
 rust/ballista/docs/architecture.md                 |    75 -
 rust/ballista/docs/dev-env-rust.md                 |    38 -
 rust/ballista/docs/images/query-execution.png      |   Bin 11378 -> 0 bytes
 rust/ballista/docs/integration-testing.md          |    32 -
 rust/ballista/docs/release-process.md              |    68 -
 rust/ballista/docs/rust-docker.md                  |    66 -
 rust/ballista/docs/user-guide/.gitignore           |     2 -
 rust/ballista/docs/user-guide/README.md            |    36 -
 rust/ballista/docs/user-guide/book.toml            |    23 -
 rust/ballista/docs/user-guide/src/SUMMARY.md       |    30 -
 rust/ballista/docs/user-guide/src/client-rust.md   |    22 -
 rust/ballista/docs/user-guide/src/clients.md       |    22 -
 rust/ballista/docs/user-guide/src/configuration.md |    32 -
 rust/ballista/docs/user-guide/src/deployment.md    |    26 -
 .../ballista/docs/user-guide/src/docker-compose.md |    55 -
 rust/ballista/docs/user-guide/src/faq.md           |    31 -
 .../user-guide/src/img/ballista-architecture.png   |   Bin 21225 -> 0 bytes
 rust/ballista/docs/user-guide/src/introduction.md  |    52 -
 rust/ballista/docs/user-guide/src/kubernetes.md    |   216 -
 rust/ballista/docs/user-guide/src/standalone.md    |    92 -
 rust/ballista/rust/.dockerignore                   |    23 -
 rust/ballista/rust/.gitignore                      |     2 -
 rust/ballista/rust/Cargo.toml                      |    30 -
 rust/ballista/rust/benchmarks/tpch/.dockerignore   |    25 -
 rust/ballista/rust/benchmarks/tpch/.gitignore      |     1 -
 rust/ballista/rust/benchmarks/tpch/Cargo.toml      |    36 -
 rust/ballista/rust/benchmarks/tpch/README.md       |   104 -
 .../rust/benchmarks/tpch/docker-compose.yaml       |    62 -
 rust/ballista/rust/benchmarks/tpch/entrypoint.sh   |    22 -
 rust/ballista/rust/benchmarks/tpch/queries/q1.sql  |    21 -
 rust/ballista/rust/benchmarks/tpch/queries/q10.sql |    31 -
 rust/ballista/rust/benchmarks/tpch/queries/q11.sql |    27 -
 rust/ballista/rust/benchmarks/tpch/queries/q12.sql |    30 -
 rust/ballista/rust/benchmarks/tpch/queries/q13.sql |    20 -
 rust/ballista/rust/benchmarks/tpch/queries/q14.sql |    13 -
 rust/ballista/rust/benchmarks/tpch/queries/q16.sql |    30 -
 rust/ballista/rust/benchmarks/tpch/queries/q17.sql |    17 -
 rust/ballista/rust/benchmarks/tpch/queries/q18.sql |    32 -
 rust/ballista/rust/benchmarks/tpch/queries/q19.sql |    35 -
 rust/ballista/rust/benchmarks/tpch/queries/q2.sql  |    43 -
 rust/ballista/rust/benchmarks/tpch/queries/q20.sql |    37 -
 rust/ballista/rust/benchmarks/tpch/queries/q21.sql |    39 -
 rust/ballista/rust/benchmarks/tpch/queries/q22.sql |    37 -
 rust/ballista/rust/benchmarks/tpch/queries/q3.sql  |    22 -
 rust/ballista/rust/benchmarks/tpch/queries/q4.sql  |    21 -
 rust/ballista/rust/benchmarks/tpch/queries/q5.sql  |    24 -
 rust/ballista/rust/benchmarks/tpch/queries/q6.sql  |     9 -
 rust/ballista/rust/benchmarks/tpch/queries/q7.sql  |    39 -
 rust/ballista/rust/benchmarks/tpch/queries/q8.sql  |    37 -
 rust/ballista/rust/benchmarks/tpch/queries/q9.sql  |    32 -
 rust/ballista/rust/benchmarks/tpch/run.sh          |    25 -
 rust/ballista/rust/benchmarks/tpch/src/main.rs     |   360 -
 rust/ballista/rust/benchmarks/tpch/tpch-gen.sh     |    33 -
 .../rust/benchmarks/tpch/tpchgen.dockerfile        |    32 -
 rust/ballista/rust/client/Cargo.toml               |    35 -
 rust/ballista/rust/client/README.md                |    22 -
 rust/ballista/rust/client/src/columnar_batch.rs    |   167 -
 rust/ballista/rust/client/src/context.rs           |   400 -
 rust/ballista/rust/client/src/lib.rs               |    20 -
 rust/ballista/rust/client/src/prelude.rs           |    23 -
 rust/ballista/rust/core/Cargo.toml                 |    50 -
 rust/ballista/rust/core/README.md                  |    21 -
 rust/ballista/rust/core/build.rs                   |    26 -
 rust/ballista/rust/core/proto/ballista.proto       |   824 --
 rust/ballista/rust/core/src/client.rs              |   224 -
 rust/ballista/rust/core/src/datasource.rs          |    72 -
 rust/ballista/rust/core/src/error.rs               |   172 -
 rust/ballista/rust/core/src/execution_plans/mod.rs |    27 -
 .../rust/core/src/execution_plans/query_stage.rs   |    92 -
 .../core/src/execution_plans/shuffle_reader.rs     |   106 -
 .../core/src/execution_plans/unresolved_shuffle.rs |   101 -
 rust/ballista/rust/core/src/lib.rs                 |    34 -
 rust/ballista/rust/core/src/memory_stream.rs       |    93 -
 .../rust/core/src/serde/logical_plan/from_proto.rs |  1200 --
 .../rust/core/src/serde/logical_plan/mod.rs        |   929 --
 .../rust/core/src/serde/logical_plan/to_proto.rs   |  1233 --
 rust/ballista/rust/core/src/serde/mod.rs           |    69 -
 .../core/src/serde/physical_plan/from_proto.rs     |   398 -
 .../rust/core/src/serde/physical_plan/mod.rs       |   178 -
 .../rust/core/src/serde/physical_plan/to_proto.rs  |   556 -
 .../rust/core/src/serde/scheduler/from_proto.rs    |   124 -
 rust/ballista/rust/core/src/serde/scheduler/mod.rs |   262 -
 .../rust/core/src/serde/scheduler/to_proto.rs      |    90 -
 rust/ballista/rust/core/src/utils.rs               |   327 -
 rust/ballista/rust/executor/Cargo.toml             |    59 -
 rust/ballista/rust/executor/README.md              |    31 -
 rust/ballista/rust/executor/build.rs               |    24 -
 .../executor/examples/example_executor_config.toml |    22 -
 .../rust/executor/executor_config_spec.toml        |    79 -
 rust/ballista/rust/executor/src/collect.rs         |   127 -
 rust/ballista/rust/executor/src/execution_loop.rs  |   172 -
 rust/ballista/rust/executor/src/flight_service.rs  |   374 -
 rust/ballista/rust/executor/src/lib.rs             |    52 -
 rust/ballista/rust/executor/src/main.rs            |   176 -
 rust/ballista/rust/scheduler/Cargo.toml            |    66 -
 rust/ballista/rust/scheduler/README.md             |    51 -
 rust/ballista/rust/scheduler/build.rs              |    24 -
 .../rust/scheduler/scheduler_config_spec.toml      |    60 -
 rust/ballista/rust/scheduler/src/api/handlers.rs   |    55 -
 rust/ballista/rust/scheduler/src/api/mod.rs        |    87 -
 rust/ballista/rust/scheduler/src/lib.rs            |   490 -
 rust/ballista/rust/scheduler/src/main.rs           |   156 -
 rust/ballista/rust/scheduler/src/planner.rs        |   494 -
 rust/ballista/rust/scheduler/src/state/etcd.rs     |   205 -
 rust/ballista/rust/scheduler/src/state/mod.rs      |   880 --
 .../rust/scheduler/src/state/standalone.rs         |   228 -
 rust/ballista/rust/scheduler/src/test_utils.rs     |   148 -
 .../rust/scheduler/testdata/customer/customer.tbl  |    10 -
 .../scheduler/testdata/lineitem/partition0.tbl     |    10 -
 .../scheduler/testdata/lineitem/partition1.tbl     |    10 -
 .../rust/scheduler/testdata/nation/nation.tbl      |    10 -
 .../rust/scheduler/testdata/orders/orders.tbl      |    10 -
 .../ballista/rust/scheduler/testdata/part/part.tbl |    10 -
 .../rust/scheduler/testdata/partsupp/partsupp.tbl  |    10 -
 .../rust/scheduler/testdata/region/region.tbl      |     5 -
 .../rust/scheduler/testdata/supplier/supplier.tbl  |    10 -
 rust/ballista/ui/scheduler/.gitignore              |    23 -
 rust/ballista/ui/scheduler/README.md               |    45 -
 rust/ballista/ui/scheduler/index.d.ts              |    18 -
 rust/ballista/ui/scheduler/package.json            |    58 -
 rust/ballista/ui/scheduler/public/favicon.ico      |   Bin 3870 -> 0 bytes
 rust/ballista/ui/scheduler/public/index.html       |    62 -
 rust/ballista/ui/scheduler/public/logo192.png      |   Bin 5347 -> 0 bytes
 rust/ballista/ui/scheduler/public/logo512.png      |   Bin 9664 -> 0 bytes
 rust/ballista/ui/scheduler/public/manifest.json    |    25 -
 rust/ballista/ui/scheduler/public/robots.txt       |    20 -
 rust/ballista/ui/scheduler/react-table-config.d.ts |   137 -
 rust/ballista/ui/scheduler/src/App.css             |    18 -
 rust/ballista/ui/scheduler/src/App.test.tsx        |    26 -
 rust/ballista/ui/scheduler/src/App.tsx             |    97 -
 .../ui/scheduler/src/components/DataTable.tsx      |   131 -
 .../ballista/ui/scheduler/src/components/Empty.tsx |    36 -
 .../ui/scheduler/src/components/Footer.tsx         |    28 -
 .../ui/scheduler/src/components/Header.tsx         |    82 -
 .../ui/scheduler/src/components/NodesList.tsx      |    71 -
 .../ui/scheduler/src/components/QueriesList.tsx    |   115 -
 .../ui/scheduler/src/components/Summary.tsx        |    89 -
 rust/ballista/ui/scheduler/src/components/logo.svg |    25 -
 rust/ballista/ui/scheduler/src/index.css           |    32 -
 rust/ballista/ui/scheduler/src/index.tsx           |    38 -
 rust/ballista/ui/scheduler/src/react-app-env.d.ts  |    18 -
 rust/ballista/ui/scheduler/src/reportWebVitals.ts  |    32 -
 rust/ballista/ui/scheduler/src/setupTests.ts       |    22 -
 rust/ballista/ui/scheduler/tsconfig.json           |    28 -
 rust/ballista/ui/scheduler/yarn.lock               | 12431 -------------------
 rust/benchmarks/Cargo.toml                         |    42 -
 rust/benchmarks/README.md                          |   120 -
 rust/benchmarks/src/bin/nyctaxi.rs                 |   151 -
 rust/benchmarks/src/bin/tpch.rs                    |  1692 ---
 rust/datafusion-examples/Cargo.toml                |    39 -
 rust/datafusion-examples/examples/README.md        |    28 -
 rust/datafusion-examples/examples/csv_sql.rs       |    52 -
 rust/datafusion-examples/examples/dataframe.rs     |    47 -
 .../examples/dataframe_in_memory.rs                |    67 -
 rust/datafusion-examples/examples/flight_client.rs |    79 -
 rust/datafusion-examples/examples/flight_server.rs |   213 -
 rust/datafusion-examples/examples/parquet_sql.rs   |    50 -
 rust/datafusion-examples/examples/simple_udaf.rs   |   170 -
 rust/datafusion-examples/examples/simple_udf.rs    |   151 -
 rust/datafusion/Cargo.toml                         |    99 -
 rust/datafusion/DEVELOPERS.md                      |    92 -
 rust/datafusion/Dockerfile                         |    25 -
 rust/datafusion/README.md                          |   356 -
 rust/datafusion/benches/aggregate_query_sql.rs     |   248 -
 rust/datafusion/benches/filter_query_sql.rs        |    91 -
 rust/datafusion/benches/math_query_sql.rs          |   111 -
 rust/datafusion/benches/scalar.rs                  |    30 -
 rust/datafusion/benches/sort_limit_query_sql.rs    |   148 -
 rust/datafusion/docs/cli.md                        |    95 -
 .../docs/images/DataFusion-Logo-Dark.png           |   Bin 20134 -> 0 bytes
 .../docs/images/DataFusion-Logo-Dark.svg           |     1 -
 .../docs/images/DataFusion-Logo-Light.png          |   Bin 19102 -> 0 bytes
 .../docs/images/DataFusion-Logo-Light.svg          |     1 -
 rust/datafusion/src/bin/main.rs                    |    25 -
 rust/datafusion/src/bin/repl.rs                    |   140 -
 rust/datafusion/src/catalog/catalog.rs             |   139 -
 rust/datafusion/src/catalog/information_schema.rs  |   492 -
 rust/datafusion/src/catalog/mod.rs                 |   146 -
 rust/datafusion/src/catalog/schema.rs              |   104 -
 rust/datafusion/src/dataframe.rs                   |   286 -
 rust/datafusion/src/datasource/csv.rs              |   144 -
 rust/datafusion/src/datasource/datasource.rs       |   103 -
 rust/datafusion/src/datasource/empty.rs            |    80 -
 rust/datafusion/src/datasource/memory.rs           |   472 -
 rust/datafusion/src/datasource/mod.rs              |    28 -
 rust/datafusion/src/datasource/parquet.rs          |   373 -
 rust/datafusion/src/error.rs                       |   120 -
 rust/datafusion/src/execution/context.rs           |  3123 -----
 rust/datafusion/src/execution/dataframe_impl.rs    |   374 -
 rust/datafusion/src/execution/mod.rs               |    21 -
 rust/datafusion/src/lib.rs                         |   211 -
 rust/datafusion/src/logical_plan/builder.rs        |   595 -
 rust/datafusion/src/logical_plan/dfschema.rs       |   521 -
 rust/datafusion/src/logical_plan/display.rs        |   270 -
 rust/datafusion/src/logical_plan/expr.rs           |  1505 ---
 rust/datafusion/src/logical_plan/extension.rs      |    79 -
 rust/datafusion/src/logical_plan/mod.rs            |    50 -
 rust/datafusion/src/logical_plan/operators.rs      |   135 -
 rust/datafusion/src/logical_plan/plan.rs           |  1095 --
 rust/datafusion/src/logical_plan/registry.rs       |    34 -
 rust/datafusion/src/optimizer/constant_folding.rs  |   591 -
 rust/datafusion/src/optimizer/filter_push_down.rs  |  1021 --
 .../src/optimizer/hash_build_probe_order.rs        |   257 -
 rust/datafusion/src/optimizer/limit_push_down.rs   |   252 -
 rust/datafusion/src/optimizer/mod.rs               |    27 -
 rust/datafusion/src/optimizer/optimizer.rs         |    32 -
 .../src/optimizer/projection_push_down.rs          |   542 -
 rust/datafusion/src/optimizer/utils.rs             |   489 -
 .../src/physical_optimizer/coalesce_batches.rs     |    88 -
 .../src/physical_optimizer/merge_exec.rs           |    74 -
 rust/datafusion/src/physical_optimizer/mod.rs      |    24 -
 .../datafusion/src/physical_optimizer/optimizer.rs |    39 -
 .../src/physical_optimizer/repartition.rs          |   186 -
 rust/datafusion/src/physical_plan/aggregates.rs    |   258 -
 .../src/physical_plan/array_expressions.rs         |   127 -
 .../src/physical_plan/coalesce_batches.rs          |   316 -
 rust/datafusion/src/physical_plan/common.rs        |   104 -
 .../src/physical_plan/crypto_expressions.rs        |   198 -
 rust/datafusion/src/physical_plan/csv.rs           |   401 -
 .../src/physical_plan/datetime_expressions.rs      |   559 -
 .../src/physical_plan/distinct_expressions.rs      |   557 -
 rust/datafusion/src/physical_plan/empty.rs         |   186 -
 rust/datafusion/src/physical_plan/explain.rs       |   125 -
 .../src/physical_plan/expressions/average.rs       |   293 -
 .../src/physical_plan/expressions/binary.rs        |  1101 --
 .../src/physical_plan/expressions/case.rs          |   597 -
 .../src/physical_plan/expressions/cast.rs          |   301 -
 .../src/physical_plan/expressions/coercion.rs      |   208 -
 .../src/physical_plan/expressions/column.rs        |    86 -
 .../src/physical_plan/expressions/count.rs         |   235 -
 .../src/physical_plan/expressions/in_list.rs       |   458 -
 .../src/physical_plan/expressions/is_not_null.rs   |   119 -
 .../src/physical_plan/expressions/is_null.rs       |   119 -
 .../src/physical_plan/expressions/literal.rs       |   108 -
 .../src/physical_plan/expressions/min_max.rs       |   655 -
 .../src/physical_plan/expressions/mod.rs           |   135 -
 .../src/physical_plan/expressions/negative.rs      |   133 -
 .../src/physical_plan/expressions/not.rs           |   158 -
 .../src/physical_plan/expressions/nullif.rs        |   188 -
 .../src/physical_plan/expressions/sum.rs           |   373 -
 .../src/physical_plan/expressions/try_cast.rs      |   247 -
 rust/datafusion/src/physical_plan/filter.rs        |   240 -
 rust/datafusion/src/physical_plan/functions.rs     |  3767 ------
 rust/datafusion/src/physical_plan/group_scalar.rs  |   212 -
 .../datafusion/src/physical_plan/hash_aggregate.rs |  1395 ---
 rust/datafusion/src/physical_plan/hash_join.rs     |  1265 --
 rust/datafusion/src/physical_plan/hash_utils.rs    |   201 -
 rust/datafusion/src/physical_plan/limit.rs         |   338 -
 .../src/physical_plan/math_expressions.rs          |   118 -
 rust/datafusion/src/physical_plan/memory.rs        |   161 -
 rust/datafusion/src/physical_plan/merge.rs         |   225 -
 rust/datafusion/src/physical_plan/mod.rs           |   369 -
 rust/datafusion/src/physical_plan/parquet.rs       |  1535 ---
 rust/datafusion/src/physical_plan/planner.rs       |  1106 --
 rust/datafusion/src/physical_plan/projection.rs    |   232 -
 .../src/physical_plan/regex_expressions.rs         |   172 -
 rust/datafusion/src/physical_plan/repartition.rs   |   461 -
 rust/datafusion/src/physical_plan/sort.rs          |   478 -
 .../src/physical_plan/string_expressions.rs        |   595 -
 rust/datafusion/src/physical_plan/type_coercion.rs |   361 -
 rust/datafusion/src/physical_plan/udaf.rs          |   168 -
 rust/datafusion/src/physical_plan/udf.rs           |   112 -
 .../src/physical_plan/unicode_expressions.rs       |   532 -
 rust/datafusion/src/physical_plan/union.rs         |   143 -
 rust/datafusion/src/prelude.rs                     |    37 -
 rust/datafusion/src/scalar.rs                      |   821 --
 rust/datafusion/src/sql/mod.rs                     |    23 -
 rust/datafusion/src/sql/parser.rs                  |   380 -
 rust/datafusion/src/sql/planner.rs                 |  2723 ----
 rust/datafusion/src/sql/utils.rs                   |   376 -
 rust/datafusion/src/test/exec.rs                   |   102 -
 rust/datafusion/src/test/mod.rs                    |   346 -
 rust/datafusion/src/test/user_defined.rs           |    76 -
 rust/datafusion/src/test/variable.rs               |    58 -
 rust/datafusion/src/variable/mod.rs                |    36 -
 rust/datafusion/tests/aggregate_simple.csv         |    16 -
 rust/datafusion/tests/custom_sources.rs            |   200 -
 rust/datafusion/tests/customer.csv                 |     4 -
 rust/datafusion/tests/dataframe.rs                 |    79 -
 rust/datafusion/tests/example.csv                  |     2 -
 rust/datafusion/tests/provider_filter_pushdown.rs  |   177 -
 rust/datafusion/tests/sql.rs                       |  2707 ----
 rust/datafusion/tests/user_defined_plan.rs         |   512 -
 294 files changed, 78087 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index eb71138..36732a2 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -49,7 +49,6 @@
 !ruby/red-plasma/lib/plasma/version.rb
 !ruby/red-plasma/red-plasma.gemspec
 !rust/Cargo.toml
-!rust/benchmarks/Cargo.toml
 !rust/arrow/Cargo.toml
 !rust/arrow/benches
 !rust/arrow-flight/Cargo.toml
@@ -57,6 +56,4 @@
 !rust/parquet/build.rs
 !rust/parquet_derive/Cargo.toml
 !rust/parquet_derive_test/Cargo.toml
-!rust/datafusion/Cargo.toml
-!rust/datafusion/benches
 !rust/integration-testing/Cargo.toml
diff --git a/.github/workflows/dev_pr/labeler.yml b/.github/workflows/dev_pr/labeler.yml
index 098e1ba..65ca3f2 100644
--- a/.github/workflows/dev_pr/labeler.yml
+++ b/.github/workflows/dev_pr/labeler.yml
@@ -48,12 +48,6 @@ lang-ruby:
 lang-rust:
   - rust/**/*
 
-datafusion:
-  - rust/datafusion/**/*
-
-ballista:
-  - rust/ballista/**/*
-
 flight:
   - cpp/src/arrow/flight/**/*
   - r/R/flight.*
diff --git a/rust/ballista/.dockerignore b/rust/ballista/.dockerignore
deleted file mode 100644
index 3cde49e..0000000
--- a/rust/ballista/.dockerignore
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-rust/**/target
diff --git a/rust/ballista/README.md b/rust/ballista/README.md
deleted file mode 100644
index 288386f..0000000
--- a/rust/ballista/README.md
+++ /dev/null
@@ -1,64 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Ballista: Distributed Compute with Apache Arrow
-
-Ballista is a distributed compute platform primarily implemented in Rust, and powered by Apache Arrow. It is built 
-on an architecture that allows other programming languages (such as Python, C++, and Java) to be supported as 
-first-class citizens without paying a penalty for serialization costs.
-
-The foundational technologies in Ballista are:
-
-- [Apache Arrow](https://arrow.apache.org/) memory model and compute kernels for efficient processing of data.
-- [Apache Arrow Flight Protocol](https://arrow.apache.org/blog/2019/10/13/introducing-arrow-flight/) for efficient 
-  data transfer between processes.
-- [Google Protocol Buffers](https://developers.google.com/protocol-buffers) for serializing query plans.
-- [Docker](https://www.docker.com/) for packaging up executors along with user-defined code.
-
-Ballista can be deployed as a standalone cluster and also supports [Kubernetes](https://kubernetes.io/). In either
-case, the scheduler can be configured to use [etcd](https://etcd.io/) as a backing store to (eventually) provide
-redundancy in the case of a scheduler failing.
-
-# How does this compare to Apache Spark?
-
-Although Ballista is largely inspired by Apache Spark, there are some key differences.
-
-- The choice of Rust as the main execution language means that memory usage is deterministic and avoids the overhead of
-  GC pauses.
-- Ballista is designed from the ground up to use columnar data, enabling a number of efficiencies such as vectorized
-  processing (SIMD and GPU) and efficient compression. Although Spark does have some columnar support, it is still
-  largely row-based today.
-- The combination of Rust and Arrow provides excellent memory efficiency and memory usage can be 5x - 10x lower than
-  Apache Spark in some cases, which means that more processing can fit on a single node, reducing the overhead of
-  distributed compute.
-- The use of Apache Arrow as the memory model and network protocol means that data can be exchanged between executors
-  in any programming language with minimal serialization overhead.
-
-# Status
-
-The Ballista project was donated to Apache Arrow in April 2021 and work is underway to integrate more tightly with 
-DataFusion.
-
-One of the goals is to implement a common scheduler that can seamlessly scale queries across cores in DataFusion and 
-across nodes in Ballista.
-
-Ballista issues are tracked in ASF JIRA [here](https://issues.apache.org/jira/issues/?jql=project%20%3D%20ARROW%20AND%20component%20%3D%20%22Rust%20-%20Ballista%22).
-
-
-
diff --git a/rust/ballista/dev/build-rust-base.sh b/rust/ballista/dev/build-rust-base.sh
deleted file mode 100755
index ee4b32c..0000000
--- a/rust/ballista/dev/build-rust-base.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-BALLISTA_VERSION=0.4.2-SNAPSHOT
-
-set -e
-
-docker build -t ballistacompute/rust-base:$BALLISTA_VERSION -f docker/rust-base.dockerfile .
diff --git a/rust/ballista/dev/build-rust.sh b/rust/ballista/dev/build-rust.sh
deleted file mode 100755
index 1916f8e..0000000
--- a/rust/ballista/dev/build-rust.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-BALLISTA_VERSION=0.4.2-SNAPSHOT
-
-set -e
-
-docker build -t ballistacompute/ballista-rust:$BALLISTA_VERSION -f docker/rust.dockerfile .
diff --git a/rust/ballista/dev/integration-tests.sh b/rust/ballista/dev/integration-tests.sh
deleted file mode 100755
index cc34a5c..0000000
--- a/rust/ballista/dev/integration-tests.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-set -e
-./dev/build-rust.sh
-pushd rust/benchmarks/tpch
-./tpch-gen.sh
-
-docker-compose up -d
-docker-compose run ballista-client ./run.sh
-docker-compose down
-
-popd
diff --git a/rust/ballista/docker/README.md b/rust/ballista/docker/README.md
deleted file mode 100644
index 8417d04..0000000
--- a/rust/ballista/docker/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Ballista Docker Images
-
-Pre-built Docker images are available from [Docker Hub](https://hub.docker.com/orgs/ballistacompute/repositories), but the images can also be built from source with the commands below.
-
-Run these commands from the root directory of the project.
-
-```bash
-./dev/build-all.sh
-```
-
diff --git a/rust/ballista/docker/rust-base.dockerfile b/rust/ballista/docker/rust-base.dockerfile
deleted file mode 100644
index 4519225..0000000
--- a/rust/ballista/docker/rust-base.dockerfile
+++ /dev/null
@@ -1,99 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Turn .dockerignore to .dockerallow by excluding everything and explicitly
-# allowing specific files and directories. This enables us to quickly add
-# dependency files to the docker content without scanning the whole directory.
-# This setup requires all of our docker containers to have arrow's source
-# as a mounted directory.
-
-
-# Base image: rust:1.49.0-buster, which is based on debian:buster
-FROM rust:1.49.0-buster AS builder
-
-RUN apt update && apt -y install musl musl-dev musl-tools libssl-dev openssl
-
-#NOTE: the following was copied from https://github.com/emk/rust-musl-builder/blob/master/Dockerfile under Apache 2.0 license
-
-# The OpenSSL version to use. We parameterize this because many Rust
-# projects will fail to build with 1.1.
-#ARG OPENSSL_VERSION=1.0.2r
-ARG OPENSSL_VERSION=1.1.1b
-
-# Build a static library version of OpenSSL using musl-libc.  This is needed by
-# the popular Rust `hyper` crate.
-#
-# We point /usr/local/musl/include/linux at some Linux kernel headers (not
-# necessarily the right ones) in an effort to compile OpenSSL 1.1's "engine"
-# component. It's possible that this will cause bizarre and terrible things to
-# happen. There may be "sanitized" headers that could be used instead.
-RUN echo "Building OpenSSL" && \
-    ls /usr/include/linux && \
-    mkdir -p /usr/local/musl/include && \
-    ln -s /usr/include/linux /usr/local/musl/include/linux && \
-    ln -s /usr/include/x86_64-linux-gnu/asm /usr/local/musl/include/asm && \
-    ln -s /usr/include/asm-generic /usr/local/musl/include/asm-generic && \
-    cd /tmp && \
-    curl -LO "https://www.openssl.org/source/openssl-$OPENSSL_VERSION.tar.gz" && \
-    tar xvzf "openssl-$OPENSSL_VERSION.tar.gz" && cd "openssl-$OPENSSL_VERSION" && \
-    env CC=musl-gcc ./Configure no-shared no-zlib -fPIC --prefix=/usr/local/musl -DOPENSSL_NO_SECURE_MEMORY linux-x86_64 && \
-    env C_INCLUDE_PATH=/usr/local/musl/include/ make depend && \
-    env C_INCLUDE_PATH=/usr/local/musl/include/ make && \
-    make install && \
-    rm /usr/local/musl/include/linux /usr/local/musl/include/asm /usr/local/musl/include/asm-generic && \
-    rm -r /tmp/*
-
-RUN echo "Building zlib" && \
-    cd /tmp && \
-    ZLIB_VERSION=1.2.11 && \
-    curl -LO "http://zlib.net/zlib-$ZLIB_VERSION.tar.gz" && \
-    tar xzf "zlib-$ZLIB_VERSION.tar.gz" && cd "zlib-$ZLIB_VERSION" && \
-    CC=musl-gcc ./configure --static --prefix=/usr/local/musl && \
-    make && make install && \
-    rm -r /tmp/*
-
-RUN echo "Building libpq" && \
-    cd /tmp && \
-    POSTGRESQL_VERSION=11.2 && \
-    curl -LO "https://ftp.postgresql.org/pub/source/v$POSTGRESQL_VERSION/postgresql-$POSTGRESQL_VERSION.tar.gz" && \
-    tar xzf "postgresql-$POSTGRESQL_VERSION.tar.gz" && cd "postgresql-$POSTGRESQL_VERSION" && \
-    CC=musl-gcc CPPFLAGS=-I/usr/local/musl/include LDFLAGS=-L/usr/local/musl/lib ./configure --with-openssl --without-readline --prefix=/usr/local/musl && \
-    cd src/interfaces/libpq && make all-static-lib && make install-lib-static && \
-    cd ../../bin/pg_config && make && make install && \
-    rm -r /tmp/*
-
-ENV OPENSSL_DIR=/usr/local/musl/ \
-    OPENSSL_INCLUDE_DIR=/usr/local/musl/include/ \
-    DEP_OPENSSL_INCLUDE=/usr/local/musl/include/ \
-    OPENSSL_LIB_DIR=/usr/local/musl/lib/ \
-    OPENSSL_STATIC=1 \
-    PQ_LIB_STATIC_X86_64_UNKNOWN_LINUX_MUSL=1 \
-    PG_CONFIG_X86_64_UNKNOWN_LINUX_GNU=/usr/bin/pg_config \
-    PKG_CONFIG_ALLOW_CROSS=true \
-    PKG_CONFIG_ALL_STATIC=true \
-    LIBZ_SYS_STATIC=1 \
-    TARGET=musl
-
-# The copied content mentioned in the NOTE above ends here.
-
-## Download the target for static linking.
-RUN rustup target add x86_64-unknown-linux-musl
-RUN cargo install cargo-build-deps
-
-# prepare toolchain
-RUN rustup update && \
-    rustup component add rustfmt
\ No newline at end of file
diff --git a/rust/ballista/docker/rust.dockerfile b/rust/ballista/docker/rust.dockerfile
deleted file mode 100644
index 8b06af3..0000000
--- a/rust/ballista/docker/rust.dockerfile
+++ /dev/null
@@ -1,71 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Turn .dockerignore to .dockerallow by excluding everything and explicitly
-# allowing specific files and directories. This enables us to quickly add
-# dependency files to the docker content without scanning the whole directory.
-# This setup requires all of our docker containers to have arrow's source
-# as a mounted directory.
-
-ARG RELEASE_FLAG=--release
-FROM ballistacompute/rust-base:0.4.0-20210213 AS base
-WORKDIR /tmp/ballista
-RUN apt-get -y install cmake
-RUN cargo install cargo-chef 
-
-FROM base as planner
-COPY rust .
-RUN cargo chef prepare --recipe-path recipe.json
-
-FROM base as cacher
-COPY --from=planner /tmp/ballista/recipe.json recipe.json
-RUN cargo chef cook $RELEASE_FLAG --recipe-path recipe.json
-
-FROM base as builder
-COPY rust .
-COPY --from=cacher /tmp/ballista/target target
-ARG RELEASE_FLAG=--release
-
-# force build.rs to run to generate configure_me code.
-ENV FORCE_REBUILD='true'
-RUN cargo build $RELEASE_FLAG
-
-# put the executor on /executor (needs to be copied from a different place depending on the flag)
-ENV RELEASE_FLAG=${RELEASE_FLAG}
-RUN if [ -z "$RELEASE_FLAG" ]; then mv /tmp/ballista/target/debug/ballista-executor /executor; else mv /tmp/ballista/target/release/ballista-executor /executor; fi
-
-# put the scheduler on /scheduler (needs to be copied from a different place depending on the flag)
-ENV RELEASE_FLAG=${RELEASE_FLAG}
-RUN if [ -z "$RELEASE_FLAG" ]; then mv /tmp/ballista/target/debug/ballista-scheduler /scheduler; else mv /tmp/ballista/target/release/ballista-scheduler /scheduler; fi
-
-# put the tpch on /tpch (needs to be copied from a different place depending on the flag)
-ENV RELEASE_FLAG=${RELEASE_FLAG}
-RUN if [ -z "$RELEASE_FLAG" ]; then mv /tmp/ballista/target/debug/tpch /tpch; else mv /tmp/ballista/target/release/tpch /tpch; fi
-
-# Copy the binary into a new container for a smaller docker image
-FROM ballistacompute/rust-base:0.4.0-20210213
-
-COPY --from=builder /executor /
-
-COPY --from=builder /scheduler /
-
-COPY --from=builder /tpch /
-
-ENV RUST_LOG=info
-ENV RUST_BACKTRACE=full
-
-CMD ["/executor", "--local"]
diff --git a/rust/ballista/docs/README.md b/rust/ballista/docs/README.md
deleted file mode 100644
index 44c831d..0000000
--- a/rust/ballista/docs/README.md
+++ /dev/null
@@ -1,37 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Ballista Developer Documentation
-
-This directory contains documentation for developers who are contributing to Ballista. If you are looking for
-end-user documentation for a published release, please start with the 
-[Ballista User Guide](https://ballistacompute.org/docs/) instead.
-
-## Architecture & Design
-
-- Read the [Architecture Overview](architecture.md) to get an understanding of the scheduler and executor 
-  processes and how distributed query execution works.
-
-## Build, Test, Release
-
-- Setting up a [Rust development environment](dev-env-rust.md).
-- Setting up a [Java development environment](dev-env-jvm.md).
-- Notes on building [Rust docker images](rust-docker.md)  
-- [Integration Testing](integration-testing.md)
-- [Release process](release-process.md)
-
diff --git a/rust/ballista/docs/architecture.md b/rust/ballista/docs/architecture.md
deleted file mode 100644
index a73b53a..0000000
--- a/rust/ballista/docs/architecture.md
+++ /dev/null
@@ -1,75 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Ballista Architecture
-
-## Overview
-
-Ballista allows queries to be executed in a distributed cluster. A cluster consists of one or 
-more scheduler processes and one or more executor processes. See the following sections in this document for more
-details about these components.
-
-The scheduler accepts logical query plans and translates them into physical query plans using DataFusion, and then
-runs a secondary planning/optimization process to translate the physical query plan into a distributed physical 
-query plan. 
-
-This process breaks a query down into a number of query stages that can be executed independently. There are 
-dependencies between query stages and these dependencies form a directed acyclic graph (DAG) because a query
-stage cannot start until its child query stages have completed.
-
-Each query stage has one or more partitions that can be processed in parallel by the available 
-executors in the cluster. This is the basic unit of scalability in Ballista.
-
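-The stage model can be sketched in Rust (illustrative only, not Ballista's actual
-types): a stage becomes runnable once all of the stages it depends on have completed.
-
-```rust
-use std::collections::HashSet;
-
-/// Illustrative stand-in for a query stage in the distributed plan.
-struct QueryStage {
-    id: usize,
-    /// Child stages whose shuffle output this stage reads.
-    depends_on: Vec<usize>,
-}
-
-/// Return the ids of stages whose child stages have all completed.
-fn runnable(stages: &[QueryStage], completed: &HashSet<usize>) -> Vec<usize> {
-    stages
-        .iter()
-        .filter(|s| !completed.contains(&s.id))
-        .filter(|s| s.depends_on.iter().all(|d| completed.contains(d)))
-        .map(|s| s.id)
-        .collect()
-}
-
-fn main() {
-    // Stage 2 reads the shuffle output of stages 0 and 1.
-    let stages = vec![
-        QueryStage { id: 0, depends_on: vec![] },
-        QueryStage { id: 1, depends_on: vec![] },
-        QueryStage { id: 2, depends_on: vec![0, 1] },
-    ];
-    let mut completed = HashSet::new();
-    assert_eq!(runnable(&stages, &completed), vec![0, 1]);
-    completed.insert(0);
-    completed.insert(1);
-    assert_eq!(runnable(&stages, &completed), vec![2]);
-}
-```
-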
-The following diagram shows the flow of requests and responses between the client, scheduler, and executor 
-processes. 
-
-![Query Execution Flow](images/query-execution.png)
-
-## Scheduler Process
-
-The scheduler process implements a gRPC interface (defined in 
-[ballista.proto](../rust/core/proto/ballista.proto)). The interface provides the following methods:
-
-| Method               | Description                                                          |
-|----------------------|----------------------------------------------------------------------|
-| ExecuteQuery         | Submit a logical query plan or SQL query for execution               |
-| GetExecutorsMetadata | Retrieve a list of executors that have registered with a scheduler   |
-| GetFileMetadata      | Retrieve metadata about files available in the cluster file system   |
-| GetJobStatus         | Get the status of a submitted query                                  |
-| RegisterExecutor     | Executors call this method to register themselves with the scheduler |
-
-The scheduler can run in standalone mode, or can be run in clustered mode using etcd as backing store for state.
-
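-The backing store can be sketched as a small key/value interface (illustrative only;
-the real implementations live in `rust/ballista/rust/scheduler/src/state/` as
-`standalone.rs` and `etcd.rs`):
-
-```rust
-use std::collections::HashMap;
-use std::sync::Mutex;
-
-/// Illustrative stand-in for the scheduler's pluggable state store.
-trait StateBackend: Send + Sync {
-    fn put(&self, key: &str, value: Vec<u8>) -> Result<(), String>;
-    fn get(&self, key: &str) -> Result<Option<Vec<u8>>, String>;
-}
-
-/// Standalone mode: state is held in process memory, so it is lost if the
-/// scheduler fails; the etcd-backed implementation restores that redundancy.
-struct StandaloneBackend(Mutex<HashMap<String, Vec<u8>>>);
-
-impl StateBackend for StandaloneBackend {
-    fn put(&self, key: &str, value: Vec<u8>) -> Result<(), String> {
-        self.0.lock().map_err(|e| e.to_string())?.insert(key.to_owned(), value);
-        Ok(())
-    }
-    fn get(&self, key: &str) -> Result<Option<Vec<u8>>, String> {
-        Ok(self.0.lock().map_err(|e| e.to_string())?.get(key).cloned())
-    }
-}
-```
-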
-## Executor Process
-
-The executor process implements the Apache Arrow Flight gRPC interface and is responsible for:
-
-- Executing query stages and persisting the results to disk in the Apache Arrow IPC format (see the sketch below)
-- Making query stage results available as Flights so that they can be retrieved by other executors as well as by 
-  clients
-
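-A minimal sketch of that persistence step using the `arrow` crate's IPC file
-writer (the schema, batch, and file name here are made up for illustration):
-
-```rust
-use std::fs::File;
-use std::sync::Arc;
-
-use arrow::array::Int32Array;
-use arrow::datatypes::{DataType, Field, Schema};
-use arrow::error::Result;
-use arrow::ipc::writer::FileWriter;
-use arrow::record_batch::RecordBatch;
-
-fn main() -> Result<()> {
-    // A stand-in for one partition of query-stage output.
-    let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
-    let batch = RecordBatch::try_new(
-        schema.clone(),
-        vec![Arc::new(Int32Array::from(vec![1, 2, 3]))],
-    )?;
-
-    // Persist the partition in the Arrow IPC file format so that it can later
-    // be served to other executors (or to clients) as a Flight.
-    let file = File::create("stage-0-partition-0.arrow")?;
-    let mut writer = FileWriter::try_new(file, &schema)?;
-    writer.write(&batch)?;
-    writer.finish()?;
-    Ok(())
-}
-```
-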
-## Rust Client
-
-The Rust client provides a DataFrame API that is a thin wrapper around the DataFusion DataFrame and provides
-the means for a client to build a query plan for execution.
-
-The client executes the query plan by submitting an `ExecuteQuery` request to the scheduler and then calls
-`GetJobStatus` to check for completion. On completion, the client receives a list of locations for the Flights 
-containing the results for the query and will then connect to the appropriate executor processes to retrieve 
-those results.
-
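-That submit-then-poll flow can be sketched as follows (the trait and types are
-hypothetical stand-ins, not the actual API of the client in `rust/ballista/rust/client`):
-
-```rust
-use std::thread::sleep;
-use std::time::Duration;
-
-/// Hypothetical job states reported by the scheduler.
-enum JobStatus {
-    Running,
-    Completed { flight_locations: Vec<String> },
-}
-
-/// Hypothetical stand-in for the scheduler's gRPC interface.
-trait Scheduler {
-    /// Submit a serialized plan and return a job id.
-    fn execute_query(&self, plan: &[u8]) -> Result<String, String>;
-    fn get_job_status(&self, job_id: &str) -> Result<JobStatus, String>;
-}
-
-/// Submit a plan, poll until completion, and return the Flight locations
-/// from which the result partitions can be fetched.
-fn collect(scheduler: &dyn Scheduler, plan: &[u8]) -> Result<Vec<String>, String> {
-    let job_id = scheduler.execute_query(plan)?;
-    loop {
-        match scheduler.get_job_status(&job_id)? {
-            JobStatus::Completed { flight_locations } => return Ok(flight_locations),
-            JobStatus::Running => sleep(Duration::from_millis(100)),
-        }
-    }
-}
-```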
diff --git a/rust/ballista/docs/dev-env-rust.md b/rust/ballista/docs/dev-env-rust.md
deleted file mode 100644
index bf50c9d..0000000
--- a/rust/ballista/docs/dev-env-rust.md
+++ /dev/null
@@ -1,38 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Setting up a Rust development environment
-
-You will need a standard Rust development environment. The easiest way to achieve this is by using rustup: https://rustup.rs/
-
-## Install OpenSSL
-
-Follow instructions for [setting up OpenSSL](https://docs.rs/openssl/0.10.28/openssl/). For Ubuntu users, the following 
-command works.
-
-```bash
-sudo apt-get install pkg-config libssl-dev
-```
-
-## Install CMake
-
-You'll need CMake to compile some of Ballista's dependencies. Ubuntu users can use the following command:
-
-```bash
-sudo apt-get install cmake
-```
\ No newline at end of file
diff --git a/rust/ballista/docs/images/query-execution.png b/rust/ballista/docs/images/query-execution.png
deleted file mode 100644
index b352402..0000000
Binary files a/rust/ballista/docs/images/query-execution.png and /dev/null differ
diff --git a/rust/ballista/docs/integration-testing.md b/rust/ballista/docs/integration-testing.md
deleted file mode 100644
index 2a979b6..0000000
--- a/rust/ballista/docs/integration-testing.md
+++ /dev/null
@@ -1,32 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Integration Testing
-
-Ballista has a [benchmark crate](https://github.com/ballista-compute/ballista/tree/main/rust/benchmarks/tpch) that is
-derived from TPC-H; this is currently the main form of integration testing.
-
-The following command can be used to run the integration tests.
-
-```bash
-./dev/integration-tests.sh
-```
-
-Please refer to the
-[benchmark documentation](https://github.com/ballista-compute/ballista/blob/main/rust/benchmarks/tpch/README.md)
-for more information.
diff --git a/rust/ballista/docs/release-process.md b/rust/ballista/docs/release-process.md
deleted file mode 100644
index c6c45c3..0000000
--- a/rust/ballista/docs/release-process.md
+++ /dev/null
@@ -1,68 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Release Process
-
-These instructions are for project maintainers wishing to create public releases of Ballista.
-
-- Create a `release-0.4` branch or merge latest from `main` into an existing `release-0.4` branch.
-- Update version numbers using `./dev/bump-version.sh`
-- Run integration tests with `./dev/integration-tests.sh`
-- Push changes
-- Create `v0.4.x` release tag from the `release-0.4` branch
-- Publish Docker images
-- Publish crate if possible (if we're using a published version of Arrow)
-
-## Publishing Java artifacts to Maven Central
-
-The JVM artifacts are published to Maven Central by uploading to Sonatype. You will need to set the environment
-variables `SONATYPE_USERNAME` and `SONATYPE_PASSWORD` to the correct values for your account, and you will also need
-verified GPG keys available for signing the artifacts (instructions tbd).
-
-Run the following command to publish the artifacts to a Sonatype staging repository.
-
-```bash
-./dev/publish-jvm.sh
-```
-
-## Publishing Rust Artifacts
-
-Run the following script to publish the Rust crate to crates.io.
-
-```bash
-./dev/publish-rust.sh
-```
-
-## Publishing Docker Images
-
-Run the following script to publish the executor Docker images to Docker Hub.
-
-```bash
-./dev/publish-docker-images.sh
-```
-
-## GPG Notes
-
-Refer to [this article](https://help.github.com/en/github/authenticating-to-github/generating-a-new-gpg-key) for 
-instructions on setting up GPG keys. Some useful commands are:
-
-```bash
-gpg --full-generate-key
-gpg --export-secret-keys > ~/.gnupg/secring.gpg
-gpg --key-server keys.openpgp.org --send-keys KEYID
-```
\ No newline at end of file
diff --git a/rust/ballista/docs/rust-docker.md b/rust/ballista/docs/rust-docker.md
deleted file mode 100644
index 0b94a14..0000000
--- a/rust/ballista/docs/rust-docker.md
+++ /dev/null
@@ -1,66 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-### How to build the Rust Docker image
-
-To build the docker image in development, use
-
-```
-docker build -f docker/rust.dockerfile -t ballistacompute/ballista-rust:latest .
-```
-
-This uses a multi-stage build in which the build stage is called `builder`.
-Our GitHub registry has this target cached, which we use to speed up the build time:
-
-```
-export BUILDER_IMAGE=docker.pkg.github.com/ballista-compute/ballista/ballista-rust-builder:main
-
-docker login docker.pkg.github.com -u ... -p ...  # a personal access token with the read:packages scope
-docker pull $BUILDER_IMAGE
-
-docker build --cache-from $BUILDER_IMAGE -f docker/rust.dockerfile -t ballista:latest .
-```
-
-The last command builds the image by re-using the cached builder image.
-
-### Docker images for development
-
-This project often requires testing on Kubernetes. For this reason, we have a GitHub workflow to push images to
-GitHub's registry, both from this repo and its forks.
-
-The basic principle is that every push to a git reference builds and publishes a docker image.
-Specifically, given a branch or tag `${REF}`,
-
-* `docker.pkg.github.com/ballista-compute/ballista/ballista-rust:${REF}` is the latest image from $REF
-* `docker.pkg.github.com/${USER}/ballista/ballista-rust:${REF}` is the latest image from $REF on your fork
-
-To pull them from a Kubernetes cluster or your computer, you need a personal access token with the `read:packages`
-scope, and you must log in to the registry `docker.pkg.github.com`.
-
-The builder image - the large image with all the cargo caches - is available on the same registry as described above, and is also
-available in all forks and for all references.
-
-Please refer to the [rust workflow](.github/workflows/rust.yaml) and [rust dockerfile](docker/rust.dockerfile) for details on how we build and publish these images.
-
-### Get the binary
-
-If you do not aim to run this in Docker but on some other Linux-based machine, you can extract the latest binary from a Docker image on the registry: the binary is statically linked and thus runs on any Linux-based machine. You can get it using
-
-```
-id=$(docker create $BUILDER_IMAGE) && docker cp $id:/executor executor && docker rm -v $id
-```
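-
-Assuming the extracted binary accepts the same flags as the Docker entrypoint shown elsewhere in these docs (a sketch, not verified against every release), you can then start an executor directly on the host:
-
-```bash
-chmod +x executor
-./executor --external-host localhost --port 50051
-```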
diff --git a/rust/ballista/docs/user-guide/.gitignore b/rust/ballista/docs/user-guide/.gitignore
deleted file mode 100644
index e662f99..0000000
--- a/rust/ballista/docs/user-guide/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-ballista-book.tgz
-book
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/README.md b/rust/ballista/docs/user-guide/README.md
deleted file mode 100644
index 9ee3e90..0000000
--- a/rust/ballista/docs/user-guide/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Ballista User Guide Source
-
-This directory contains the sources for the user guide that is published at https://ballistacompute.org/docs/.
-
-## Generate HTML
-
-```bash
-cargo install mdbook
-mdbook build
-```
-
-## Deploy User Guide to Web Site
-
-Requires ssh certificate to be available.
-
-```bash
-./deploy.sh
-```
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/book.toml b/rust/ballista/docs/user-guide/book.toml
deleted file mode 100644
index cf1653d..0000000
--- a/rust/ballista/docs/user-guide/book.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[book]
-authors = ["Andy Grove"]
-language = "en"
-multilingual = false
-src = "src"
-title = "Ballista User Guide"
diff --git a/rust/ballista/docs/user-guide/src/SUMMARY.md b/rust/ballista/docs/user-guide/src/SUMMARY.md
deleted file mode 100644
index c8fc2c8..0000000
--- a/rust/ballista/docs/user-guide/src/SUMMARY.md
+++ /dev/null
@@ -1,30 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Summary
-
-- [Introduction](introduction.md)
-- [Create a Ballista Cluster](deployment.md)
-  - [Docker](standalone.md)
-  - [Docker Compose](docker-compose.md)
-  - [Kubernetes](kubernetes.md)
-  - [Ballista Configuration](configuration.md)
-- [Clients](clients.md)
-  - [Rust](client-rust.md)
-  - [Python](client-python.md)
-- [Frequently Asked Questions](faq.md)
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/src/client-rust.md b/rust/ballista/docs/user-guide/src/client-rust.md
deleted file mode 100644
index 048c10f..0000000
--- a/rust/ballista/docs/user-guide/src/client-rust.md
+++ /dev/null
@@ -1,22 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-## Ballista Rust Client
-
-The Rust client supports a `DataFrame` API as well as SQL. See the 
-[TPC-H Benchmark Client](https://github.com/ballista-compute/ballista/tree/main/rust/benchmarks/tpch) for an example.
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/src/clients.md b/rust/ballista/docs/user-guide/src/clients.md
deleted file mode 100644
index 1e223dd..0000000
--- a/rust/ballista/docs/user-guide/src/clients.md
+++ /dev/null
@@ -1,22 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-## Clients
-
-- [Rust](client-rust.md)
-- [Python](client-python.md)
diff --git a/rust/ballista/docs/user-guide/src/configuration.md b/rust/ballista/docs/user-guide/src/configuration.md
deleted file mode 100644
index 52b05b0..0000000
--- a/rust/ballista/docs/user-guide/src/configuration.md
+++ /dev/null
@@ -1,32 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Configuration 
-The Rust executor and scheduler can be configured using TOML files, environment variables, and command line arguments. The specification for config options can be found in `rust/ballista/src/bin/[executor|scheduler]_config_spec.toml`.
-
-Those files fully define Ballista's configuration. If there is a discrepancy between this documentation and the files, assume those files are correct.
-
-To get a list of command line arguments, run the binary with `--help`.
-
-There is an example config file at `ballista/rust/ballista/examples/example_executor_config.toml`.
-
-The order of precedence for arguments is: default config file < environment variables < specified config file < command line arguments. 
-
-The executor and scheduler will look for the default config file at `/etc/ballista/[executor|scheduler].toml`. To specify a different config file, use the `--config-file` argument.
-
-Environment variables are prefixed by `BALLISTA_EXECUTOR` or `BALLISTA_SCHEDULER` for the executor and scheduler respectively. Hyphens in command line arguments become underscores. For example, the `--scheduler-host` argument for the executor becomes `BALLISTA_EXECUTOR_SCHEDULER_HOST`.
\ No newline at end of file
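-
-As an illustration of the naming rule above (a sketch; both invocations assume the executor binary is on the path), the following two commands are equivalent:
-
-```bash
-# Passing the scheduler host as a command line argument
-/executor --scheduler-host ballista-scheduler
-
-# Passing it as the equivalent environment variable
-BALLISTA_EXECUTOR_SCHEDULER_HOST=ballista-scheduler /executor
-```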
diff --git a/rust/ballista/docs/user-guide/src/deployment.md b/rust/ballista/docs/user-guide/src/deployment.md
deleted file mode 100644
index 2432f2b..0000000
--- a/rust/ballista/docs/user-guide/src/deployment.md
+++ /dev/null
@@ -1,26 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Deployment
-
-Ballista is packaged as Docker images. Refer to the following guides to create a Ballista cluster:
-
-- [Create a cluster using Docker](standalone.md)
-- [Create a cluster using Docker Compose](docker-compose.md)
-- [Create a cluster using Kubernetes](kubernetes.md)
-
diff --git a/rust/ballista/docs/user-guide/src/docker-compose.md b/rust/ballista/docs/user-guide/src/docker-compose.md
deleted file mode 100644
index 2548e57..0000000
--- a/rust/ballista/docs/user-guide/src/docker-compose.md
+++ /dev/null
@@ -1,55 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Installing Ballista with Docker Compose
-
-Docker Compose is a convenient way to launch a cluster when testing locally. The following Docker Compose example 
-demonstrates how to start a cluster using a single process that acts as both a scheduler and an executor, with a data 
-volume mounted into the container so that Ballista can access the host file system.
-
-```yaml
-version: '2.0'
-services:
-  etcd:
-    image: quay.io/coreos/etcd:v3.4.9
-    command: "etcd -advertise-client-urls http://etcd:2379 -listen-client-urls http://0.0.0.0:2379"
-    ports:
-      - "2379:2379"
-  ballista-executor:
-    image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-    command: "/executor --bind-host 0.0.0.0 --port 50051 --local"
-    environment:
-      - RUST_LOG=info
-    ports:
-      - "50050:50050"
-      - "50051:50051"
-    volumes:
-      - ./data:/data
-
-
-```
-
-With the above content saved to a `docker-compose.yaml` file, the following command can be used to start the single 
-node cluster.
-
-```bash
-docker-compose up
-```
-
-The scheduler listens on port 50050, which is the port that clients will need to connect to.
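-
-As a quick sanity check that the scheduler is reachable from the host (a sketch, assuming `nc` from netcat is available):
-
-```bash
-nc -z localhost 50050 && echo "scheduler is reachable"
-```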
diff --git a/rust/ballista/docs/user-guide/src/faq.md b/rust/ballista/docs/user-guide/src/faq.md
deleted file mode 100644
index b73a376..0000000
--- a/rust/ballista/docs/user-guide/src/faq.md
+++ /dev/null
@@ -1,31 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Frequently Asked Questions
-
-## What is the relationship between Apache Arrow, DataFusion, and Ballista?
-
-Apache Arrow is a library which provides a standardized memory representation for columnar data. It also provides
-"kernels" for performing common operations on this data.
-
-DataFusion is a library for executing queries in-process using the Apache Arrow memory 
-model and computational kernels. It is designed to run within a single process, using threads 
-for parallel query execution. 
-
-Ballista is a distributed compute platform designed to leverage DataFusion and other query
-execution libraries.
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/src/img/ballista-architecture.png b/rust/ballista/docs/user-guide/src/img/ballista-architecture.png
deleted file mode 100644
index 2f78f29..0000000
Binary files a/rust/ballista/docs/user-guide/src/img/ballista-architecture.png and /dev/null differ
diff --git a/rust/ballista/docs/user-guide/src/introduction.md b/rust/ballista/docs/user-guide/src/introduction.md
deleted file mode 100644
index 59d7a1a..0000000
--- a/rust/ballista/docs/user-guide/src/introduction.md
+++ /dev/null
@@ -1,52 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-## Overview
-
-Ballista is a distributed compute platform primarily implemented in Rust, and powered by Apache Arrow. It is 
-built on an architecture that allows other programming languages to be supported as first-class citizens without paying
-a penalty for serialization costs.
-
-The foundational technologies in Ballista are:
-
-- [Apache Arrow](https://arrow.apache.org/) memory model and compute kernels for efficient processing of data.
-- [Apache Arrow Flight Protocol](https://arrow.apache.org/blog/2019/10/13/introducing-arrow-flight/) for efficient data transfer between processes.
-- [Google Protocol Buffers](https://developers.google.com/protocol-buffers) for serializing query plans.
-- [Docker](https://www.docker.com/) for packaging up executors along with user-defined code.
-
-## Architecture
-
-The following diagram highlights some of the integrations that will be possible with this unique architecture. Note that not all components shown here are available yet.
-
-![Ballista Architecture Diagram](img/ballista-architecture.png)
-
-## How does this compare to Apache Spark?
-
-Although Ballista is largely inspired by Apache Spark, there are some key differences.
-
-- The choice of Rust as the main execution language means that memory usage is deterministic and avoids the overhead of GC pauses.
-- Ballista is designed from the ground up to use columnar data, enabling a number of efficiencies such as vectorized 
-processing (SIMD and GPU) and efficient compression. Although Spark does have some columnar support, it is still 
-largely row-based today.
-- The combination of Rust and Arrow provides excellent memory efficiency and memory usage can be 5x - 10x lower than Apache Spark in some cases, which means that more processing can fit on a single node, reducing the overhead of distributed compute.
-- The use of Apache Arrow as the memory model and network protocol means that data can be exchanged between executors in any programming language with minimal serialization overhead.
-  
-## Status
-
-Ballista is currently at the proof-of-concept phase but is under active development by a growing community.
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/src/kubernetes.md b/rust/ballista/docs/user-guide/src/kubernetes.md
deleted file mode 100644
index 8cd8bee..0000000
--- a/rust/ballista/docs/user-guide/src/kubernetes.md
+++ /dev/null
@@ -1,216 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-# Deploying Ballista with Kubernetes
-
-Ballista can be deployed to any Kubernetes cluster using the following instructions. These instructions assume that
-you are already comfortable with managing Kubernetes deployments.
-
-The k8s deployment consists of:
-
-- k8s stateful set for one or more scheduler processes
-- k8s stateful set for one or more executor processes
-- k8s service to route traffic to the schedulers
-- k8s persistent volume and persistent volume claims to make local data accessible to Ballista
-
-## Limitations
-
-Ballista is at an early stage of development and therefore has some significant limitations:
-
-- There is no support for shared object stores such as S3. All data must exist locally on each node in the 
-  cluster, including on the node where any client process runs (until 
-  [#473](https://github.com/ballista-compute/ballista/issues/473) is resolved).
-- Only a single scheduler instance is currently supported unless the scheduler is configured to use `etcd` as a 
-  backing store.
-
-## Create Persistent Volume and Persistent Volume Claim 
-
-Copy the following yaml to a `pv.yaml` file and apply it to the cluster to create a persistent volume and a persistent 
-volume claim so that the specified host directory is available to the containers. This is where any data should be 
-located so that Ballista can execute queries against it.
-
-```yaml
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: data-pv
-  labels:
-    type: local
-spec:
-  storageClassName: manual
-  capacity:
-    storage: 10Gi
-  accessModes:
-    - ReadWriteOnce
-  hostPath:
-    path: "/mnt"
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: data-pv-claim
-spec:
-  storageClassName: manual
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 3Gi
-```
-
-To apply this yaml:
-
-```bash
-kubectl apply -f pv.yaml
-```
-
-You should see the following output:
-
-```bash
-persistentvolume/data-pv created
-persistentvolumeclaim/data-pv-claim created
-```
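-
-You can confirm that the persistent volume claim has bound to the volume with:
-
-```bash
-kubectl get pv,pvc
-```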
-
-## Deploying Ballista Scheduler and Executors
-
-Copy the following yaml to a `cluster.yaml` file.
-
-```yaml
-apiVersion: v1
-kind: Service
-metadata:
-  name: ballista-scheduler
-  labels:
-    app: ballista-scheduler
-spec:
-  ports:
-    - port: 50050
-      name: scheduler
-  clusterIP: None
-  selector:
-    app: ballista-scheduler
----
-apiVersion: apps/v1
-kind: StatefulSet
-metadata:
-  name: ballista-scheduler
-spec:
-  serviceName: "ballista-scheduler"
-  replicas: 1
-  selector:
-    matchLabels:
-      app: ballista-scheduler
-  template:
-    metadata:
-      labels:
-        app: ballista-scheduler
-        ballista-cluster: ballista
-    spec:
-      containers:
-      - name: ballista-scheduler
-        image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-        command: ["/scheduler"]
-        args: ["--port=50050"]
-        ports:
-          - containerPort: 50050
-            name: flight
-        volumeMounts:
-          - mountPath: /mnt
-            name: data
-      volumes:
-      - name: data
-        persistentVolumeClaim:
-          claimName: data-pv-claim
----
-apiVersion: apps/v1
-kind: StatefulSet
-metadata:
-  name: ballista-executor
-spec:
-  serviceName: "ballista-scheduler"
-  replicas: 2
-  selector:
-    matchLabels:
-      app: ballista-executor
-  template:
-    metadata:
-      labels:
-        app: ballista-executor
-        ballista-cluster: ballista
-    spec:
-      containers:
-        - name: ballista-executor
-          image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-          command: ["/executor"]
-          args: ["--port=50051", "--scheduler-host=ballista-scheduler", "--scheduler-port=50050", "--external-host=$(MY_POD_IP)"]
-          env:
-            - name: MY_POD_IP
-              valueFrom:
-                fieldRef:
-                  fieldPath: status.podIP            
-          ports:
-            - containerPort: 50051
-              name: flight
-          volumeMounts:
-            - mountPath: /mnt
-              name: data
-      volumes:
-        - name: data
-          persistentVolumeClaim:
-            claimName: data-pv-claim
-```
-
-```bash
-$ kubectl apply -f cluster.yaml
-```
-
-This should show the following output:
-
-```
-service/ballista-scheduler created
-statefulset.apps/ballista-scheduler created
-statefulset.apps/ballista-executor created
-```
-
-You can also check status by running `kubectl get pods`:
-
-```bash
-$ kubectl get pods
-NAME                   READY   STATUS    RESTARTS   AGE
-busybox                1/1     Running   0          16m
-ballista-scheduler-0   1/1     Running   0          42s
-ballista-executor-0    1/1     Running   2          42s
-ballista-executor-1    1/1     Running   0          26s
-```
-
-You can view the scheduler logs with `kubectl logs ballista-scheduler-0`:
-
-```
-$ kubectl logs ballista-scheduler-0
-[2021-02-19T00:24:01Z INFO  scheduler] Ballista v0.4.2-SNAPSHOT Scheduler listening on 0.0.0.0:50050
-[2021-02-19T00:24:16Z INFO  ballista::scheduler] Received register_executor request for ExecutorMetadata { id: "b5e81711-1c5c-46ec-8522-d8b359793188", host: "10.1.23.149", port: 50051 }
-[2021-02-19T00:24:17Z INFO  ballista::scheduler] Received register_executor request for ExecutorMetadata { id: "816e4502-a876-4ed8-b33f-86d243dcf63f", host: "10.1.23.150", port: 50051 }
-```
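-
-To reach the scheduler from outside the cluster, for example to run a client locally, you can forward the service port (a sketch using standard kubectl port forwarding and the service name from the yaml above):
-
-```bash
-kubectl port-forward service/ballista-scheduler 50050:50050
-```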
-
-## Deleting the Ballista cluster
-
-Run the following kubectl command to delete the cluster.
-
-```bash
-kubectl delete -f cluster.yaml
-```
\ No newline at end of file
diff --git a/rust/ballista/docs/user-guide/src/standalone.md b/rust/ballista/docs/user-guide/src/standalone.md
deleted file mode 100644
index e4c24fe..0000000
--- a/rust/ballista/docs/user-guide/src/standalone.md
+++ /dev/null
@@ -1,92 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-## Deploying a standalone Ballista cluster
-
-### Start a Scheduler
-
-Start a scheduler using the following syntax:
-
-```bash
-docker run --network=host \
-  -d ballistacompute/ballista-rust:0.4.2-SNAPSHOT \
-  /scheduler --port 50050
-```
-
-Run `docker ps` to check that the process is running:
-
-```
-$ docker ps
-CONTAINER ID   IMAGE                                         COMMAND                  CREATED         STATUS         PORTS     NAMES
-59452ce72138   ballistacompute/ballista-rust:0.4.2-SNAPSHOT   "/scheduler --port 5…"   6 seconds ago   Up 5 seconds             affectionate_hofstadter
-```
-
-Run `docker logs CONTAINER_ID` to check the output from the process:
-
-```
-$ docker logs 59452ce72138
-[2021-02-14T18:32:20Z INFO  scheduler] Ballista v0.4.2-SNAPSHOT Scheduler listening on 0.0.0.0:50050
-```
-
-### Start executors
-
-Start one or more executor processes. Each executor process will need to listen on a different port.
-
-```bash
-docker run --network=host \
-  -d ballistacompute/ballista-rust:0.4.2-SNAPSHOT \
-  /executor --external-host localhost --port 50051 
-```
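-
-For example, to start a second executor on port 50052 (a sketch; the flags match the command above, with only the port changed):
-
-```bash
-docker run --network=host \
-  -d ballistacompute/ballista-rust:0.4.2-SNAPSHOT \
-  /executor --external-host localhost --port 50052
-```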
-
-Use `docker ps` to check that both the scheduler and executor(s) are now running:
-
-```
-$ docker ps
-CONTAINER ID   IMAGE                                         COMMAND                  CREATED         STATUS         PORTS     NAMES
-0746ce262a19   ballistacompute/ballista-rust:0.4.2-SNAPSHOT   "/executor --externa…"   2 seconds ago   Up 1 second              naughty_mclean
-59452ce72138   ballistacompute/ballista-rust:0.4.2-SNAPSHOT   "/scheduler --port 5…"   4 minutes ago   Up 4 minutes             affectionate_hofstadter
-```
-
-Use `docker logs CONTAINER_ID` to check the output from the executor(s):
-
-```
-$ docker logs 0746ce262a19
-[2021-02-14T18:36:25Z INFO  executor] Running with config: ExecutorConfig { host: "localhost", port: 50051, work_dir: "/tmp/.tmpVRFSvn", concurrent_tasks: 4 }
-[2021-02-14T18:36:25Z INFO  executor] Ballista v0.4.2-SNAPSHOT Rust Executor listening on 0.0.0.0:50051
-[2021-02-14T18:36:25Z INFO  executor] Starting registration with scheduler
-```
-
-The external host and port will be registered with the scheduler. The executors will discover other executors by 
-requesting a list of executors from the scheduler.
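-
-You can confirm that each executor registered by searching the scheduler logs for the registration messages (a sketch; `59452ce72138` is the scheduler container ID from the earlier `docker ps` output, and the exact log format may vary between versions):
-
-```bash
-docker logs 59452ce72138 | grep register_executor
-```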
-
-### Using etcd as backing store
-
-_NOTE: This functionality is currently experimental_
-
-Ballista can optionally use [etcd](https://etcd.io/) as a backing store for the scheduler. 
-
-```bash
-docker run --network=host \
-  -d ballistacompute/ballista-rust:0.4.2-SNAPSHOT \
-  /scheduler --port 50050 \
-  --config-backend etcd \
-  --etcd-urls etcd:2379
-```
-
-Please refer to the [etcd](https://etcd.io/) web site for installation instructions. Etcd version 3.4.9 or later is 
-recommended.
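-
-For local testing, etcd itself can also be run in Docker (a sketch based on the image and flags used in the Docker Compose example, adapted for host networking):
-
-```bash
-docker run --network=host -d quay.io/coreos/etcd:v3.4.9 \
-  etcd -advertise-client-urls http://localhost:2379 \
-  -listen-client-urls http://0.0.0.0:2379
-```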
diff --git a/rust/ballista/rust/.dockerignore b/rust/ballista/rust/.dockerignore
deleted file mode 100644
index 96f99a5..0000000
--- a/rust/ballista/rust/.dockerignore
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Turn .dockerignore to .dockerallow by excluding everything and explicitly
-# allowing specific files and directories. This enables us to quickly add
-# dependency files to the docker content without scanning the whole directory.
-# This setup requires all of our docker containers to have arrow's source
-# as a mounted directory.
-target
\ No newline at end of file
diff --git a/rust/ballista/rust/.gitignore b/rust/ballista/rust/.gitignore
deleted file mode 100644
index 97eec16..0000000
--- a/rust/ballista/rust/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-target
-temp
\ No newline at end of file
diff --git a/rust/ballista/rust/Cargo.toml b/rust/ballista/rust/Cargo.toml
deleted file mode 100644
index 5e344e0..0000000
--- a/rust/ballista/rust/Cargo.toml
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[workspace]
-
-members = [
-    "benchmarks/tpch",
-    "client",
-    "core",
-    "executor",
-    "scheduler",
-]
-
-#[profile.release]
-#lto = true
-#codegen-units = 1
diff --git a/rust/ballista/rust/benchmarks/tpch/.dockerignore b/rust/ballista/rust/benchmarks/tpch/.dockerignore
deleted file mode 100644
index 3a7d0fd..0000000
--- a/rust/ballista/rust/benchmarks/tpch/.dockerignore
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Turn .dockerignore to .dockerallow by excluding everything and explicitly
-# allowing specific files and directories. This enables us to quickly add
-# dependency files to the docker content without scanning the whole directory.
-# This setup requires all of our docker containers to have arrow's source
-# as a mounted directory.
-
-data
-target
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/.gitignore b/rust/ballista/rust/benchmarks/tpch/.gitignore
deleted file mode 100644
index 6320cd2..0000000
--- a/rust/ballista/rust/benchmarks/tpch/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-data
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/Cargo.toml b/rust/ballista/rust/benchmarks/tpch/Cargo.toml
deleted file mode 100644
index 822d101..0000000
--- a/rust/ballista/rust/benchmarks/tpch/Cargo.toml
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[package]
-name = "tpch"
-version = "0.4.2-SNAPSHOT"
-homepage = "https://github.com/apache/arrow"
-repository = "https://github.com/apache/arrow"
-authors = ["Apache Arrow <de...@arrow.apache.org>"]
-license = "Apache-2.0"
-edition = "2018"
-
-[dependencies]
-ballista = { path="../../client" }
-
-arrow = { path = "../../../../arrow"  }
-datafusion = { path = "../../../../datafusion" }
-parquet = { path = "../../../../parquet"  }
-
-env_logger = "0.8"
-tokio = { version = "1.0", features = ["macros", "rt", "rt-multi-thread"] }
-structopt = "0.3"
diff --git a/rust/ballista/rust/benchmarks/tpch/README.md b/rust/ballista/rust/benchmarks/tpch/README.md
deleted file mode 100644
index 6d77694..0000000
--- a/rust/ballista/rust/benchmarks/tpch/README.md
+++ /dev/null
@@ -1,104 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# TPC-H Benchmarks
-
-TPC-H is an industry standard benchmark for testing databases and query engines. A command-line tool is available that
-can generate the raw test data at any given scale factor (scale factor refers to the amount of data to be generated).
-
-## Generating Test Data
-
-TPC-H data can be generated using the `tpch-gen.sh` script, which creates a Docker image containing the TPC-H data
-generator.
-
-```bash
-./tpch-gen.sh
-```
-
-Data will be generated into the `data` subdirectory and will not be checked in because this directory has been added 
-to the `.gitignore` file.
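-
-You can confirm that generation succeeded by listing the generated `.tbl` files:
-
-```bash
-ls -lh data/*.tbl
-```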
-
-## Running the Benchmarks
-
-To run the benchmarks, it is necessary to have at least one Ballista scheduler and one Ballista executor running.
-
-To run the scheduler from source:
-
-```bash
-cd $ARROW_HOME/rust/ballista/rust/scheduler
-RUST_LOG=info cargo run --release
-```
-
-By default the scheduler will bind to `0.0.0.0` and listen on port 50050.
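-
-To override the defaults, pass arguments after `--` (a sketch using the same flags that the Docker examples pass to the scheduler binary):
-
-```bash
-RUST_LOG=info cargo run --release -- --bind-host 0.0.0.0 --port 50050
-```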
-
-To run the executor from source:
-
-```bash
-cd $ARROW_HOME/rust/ballista/rust/executor
-RUST_LOG=info cargo run --release
-```
-
-By default the executor will bind to `0.0.0.0` and listen on port 50051.
-
-You can add SIMD/snmalloc/LTO flags to improve speed (with longer build times):
-
-```
-RUST_LOG=info RUSTFLAGS='-C target-cpu=native -C lto -C codegen-units=1 -C embed-bitcode' cargo run --release --bin executor --features "simd snmalloc" --target x86_64-unknown-linux-gnu
-```
-
-To run the benchmarks:
-
-```bash
-cd $ARROW_HOME/rust/ballista/rust/benchmarks/tpch
-cargo run --release benchmark --host localhost --port 50050 --query 1 --path $(pwd)/data --format tbl
-```
-
-## Running the Benchmarks on docker-compose
-
-To start a Rust scheduler and executor using Docker Compose:
-
-```bash
-cd $BALLISTA_HOME
-./dev/build-rust.sh
-cd $BALLISTA_HOME/rust/benchmarks/tpch
-docker-compose up
-```
-
-Then you can run the benchmark with:
-
-```bash
-docker-compose run ballista-client cargo run benchmark --host ballista-scheduler --port 50050 --query 1 --path /data --format tbl
-```
-
-## Expected output
-
-The result of query 1 should produce the following output when executed against the SF=1 dataset.
-
-```
-+--------------+--------------+----------+--------------------+--------------------+--------------------+--------------------+--------------------+----------------------+-------------+
-| l_returnflag | l_linestatus | sum_qty  | sum_base_price     | sum_disc_price     | sum_charge         | avg_qty            | avg_price          | avg_disc             | count_order |
-+--------------+--------------+----------+--------------------+--------------------+--------------------+--------------------+--------------------+----------------------+-------------+
-| A            | F            | 37734107 | 56586554400.73001  | 53758257134.870026 | 55909065222.82768  | 25.522005853257337 | 38273.12973462168  | 0.049985295838396455 | 1478493     |
-| N            | F            | 991417   | 1487504710.3799996 | 1413082168.0541    | 1469649223.1943746 | 25.516471920522985 | 38284.467760848296 | 0.05009342667421622  | 38854       |
-| N            | O            | 74476023 | 111701708529.50996 | 106118209986.10472 | 110367023144.56622 | 25.502229680934594 | 38249.1238377803   | 0.049996589476752576 | 2920373     |
-| R            | F            | 37719753 | 56568041380.90001  | 53741292684.60399  | 55889619119.83194  | 25.50579361269077  | 38250.854626099666 | 0.05000940583012587  | 1478870     |
-+--------------+--------------+----------+--------------------+--------------------+--------------------+--------------------+--------------------+----------------------+-------------+
-Query 1 iteration 0 took 1956.1 ms
-Query 1 avg time: 1956.11 ms
-```
diff --git a/rust/ballista/rust/benchmarks/tpch/docker-compose.yaml b/rust/ballista/rust/benchmarks/tpch/docker-compose.yaml
deleted file mode 100644
index f872ce1..0000000
--- a/rust/ballista/rust/benchmarks/tpch/docker-compose.yaml
+++ /dev/null
@@ -1,62 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-version: '2.0'
-services:
-  etcd:
-    image: quay.io/coreos/etcd:v3.4.9
-    command: "etcd -advertise-client-urls http://etcd:2379 -listen-client-urls http://0.0.0.0:2379"
-  ballista-scheduler:
-    image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-    command: "/scheduler --config-backend etcd --etcd-urls etcd:2379 --bind-host 0.0.0.0 --port 50050"
-    environment:
-      - RUST_LOG=ballista=debug
-    volumes:
-      - ./data:/data
-    depends_on:
-      - etcd
-  ballista-executor-1:
-    image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-    command: "/executor --bind-host 0.0.0.0 --port 50051 --external-host ballista-executor-1 --scheduler-host ballista-scheduler"
-    environment:
-      - RUST_LOG=info
-    volumes:
-      - ./data:/data
-    depends_on:
-      - ballista-scheduler
-  ballista-executor-2:
-    image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-    command: "/executor --bind-host 0.0.0.0 --port 50052 --external-host ballista-executor-2 --scheduler-host ballista-scheduler"
-    environment:
-      - RUST_LOG=info
-    volumes:
-      - ./data:/data
-    depends_on:
-      - ballista-scheduler
-  ballista-client:
-    image: ballistacompute/ballista-rust:0.4.2-SNAPSHOT
-    command: "/bin/sh" # do nothing
-    working_dir: /ballista/benchmarks/tpch
-    environment:
-      - RUST_LOG=info
-    volumes:
-      - ./data:/data
-      - ../..:/ballista
-    depends_on:
-      - ballista-scheduler
-      - ballista-executor-1
-      - ballista-executor-2
-
diff --git a/rust/ballista/rust/benchmarks/tpch/entrypoint.sh b/rust/ballista/rust/benchmarks/tpch/entrypoint.sh
deleted file mode 100755
index 71c0432..0000000
--- a/rust/ballista/rust/benchmarks/tpch/entrypoint.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -e
-cd /tpch-dbgen
-./dbgen -vf -s 1
-mv *.tbl /data
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q1.sql b/rust/ballista/rust/benchmarks/tpch/queries/q1.sql
deleted file mode 100644
index a0fcf15..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q1.sql
+++ /dev/null
@@ -1,21 +0,0 @@
-select
-    l_returnflag,
-    l_linestatus,
-    sum(l_quantity) as sum_qty,
-    sum(l_extendedprice) as sum_base_price,
-    sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
-    sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
-    avg(l_quantity) as avg_qty,
-    avg(l_extendedprice) as avg_price,
-    avg(l_discount) as avg_disc,
-    count(*) as count_order
-from
-    lineitem
-where
-        l_shipdate <= date '1998-09-02'
-group by
-    l_returnflag,
-    l_linestatus
-order by
-    l_returnflag,
-    l_linestatus;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q10.sql b/rust/ballista/rust/benchmarks/tpch/queries/q10.sql
deleted file mode 100644
index cf45e43..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q10.sql
+++ /dev/null
@@ -1,31 +0,0 @@
-select
-    c_custkey,
-    c_name,
-    sum(l_extendedprice * (1 - l_discount)) as revenue,
-    c_acctbal,
-    n_name,
-    c_address,
-    c_phone,
-    c_comment
-from
-    customer,
-    orders,
-    lineitem,
-    nation
-where
-        c_custkey = o_custkey
-  and l_orderkey = o_orderkey
-  and o_orderdate >= date '1993-10-01'
-  and o_orderdate < date '1994-01-01'
-  and l_returnflag = 'R'
-  and c_nationkey = n_nationkey
-group by
-    c_custkey,
-    c_name,
-    c_acctbal,
-    c_phone,
-    n_name,
-    c_address,
-    c_comment
-order by
-    revenue desc;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q11.sql b/rust/ballista/rust/benchmarks/tpch/queries/q11.sql
deleted file mode 100644
index c23ed1c..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q11.sql
+++ /dev/null
@@ -1,27 +0,0 @@
-select
-    ps_partkey,
-    sum(ps_supplycost * ps_availqty) as value
-from
-    partsupp,
-    supplier,
-    nation
-where
-    ps_suppkey = s_suppkey
-  and s_nationkey = n_nationkey
-  and n_name = 'GERMANY'
-group by
-    ps_partkey having
-    sum(ps_supplycost * ps_availqty) > (
-    select
-    sum(ps_supplycost * ps_availqty) * 0.0001
-    from
-    partsupp,
-    supplier,
-    nation
-    where
-    ps_suppkey = s_suppkey
-                  and s_nationkey = n_nationkey
-                  and n_name = 'GERMANY'
-    )
-order by
-    value desc;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q12.sql b/rust/ballista/rust/benchmarks/tpch/queries/q12.sql
deleted file mode 100644
index f8e6d96..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q12.sql
+++ /dev/null
@@ -1,30 +0,0 @@
-select
-    l_shipmode,
-    sum(case
-            when o_orderpriority = '1-URGENT'
-                or o_orderpriority = '2-HIGH'
-                then 1
-            else 0
-        end) as high_line_count,
-    sum(case
-            when o_orderpriority <> '1-URGENT'
-                and o_orderpriority <> '2-HIGH'
-                then 1
-            else 0
-        end) as low_line_count
-from
-    lineitem
-        join
-    orders
-    on
-            l_orderkey = o_orderkey
-where
-        l_shipmode in ('MAIL', 'SHIP')
-  and l_commitdate < l_receiptdate
-  and l_shipdate < l_commitdate
-  and l_receiptdate >= date '1994-01-01'
-  and l_receiptdate < date '1995-01-01'
-group by
-    l_shipmode
-order by
-    l_shipmode;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q13.sql b/rust/ballista/rust/benchmarks/tpch/queries/q13.sql
deleted file mode 100644
index 4bfe8c3..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q13.sql
+++ /dev/null
@@ -1,20 +0,0 @@
-select
-    c_count,
-    count(*) as custdist
-from
-    (
-        select
-            c_custkey,
-            count(o_orderkey)
-        from
-            customer left outer join orders on
-                        c_custkey = o_custkey
-                    and o_comment not like '%special%requests%'
-        group by
-            c_custkey
-    ) as c_orders (c_custkey, c_count)
-group by
-    c_count
-order by
-    custdist desc,
-    c_count desc;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q14.sql b/rust/ballista/rust/benchmarks/tpch/queries/q14.sql
deleted file mode 100644
index d8ef6af..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q14.sql
+++ /dev/null
@@ -1,13 +0,0 @@
-select
-            100.00 * sum(case
-                             when p_type like 'PROMO%'
-                                 then l_extendedprice * (1 - l_discount)
-                             else 0
-            end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
-from
-    lineitem,
-    part
-where
-        l_partkey = p_partkey
-  and l_shipdate >= date '1995-09-01'
-  and l_shipdate < date '1995-10-01';
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q16.sql b/rust/ballista/rust/benchmarks/tpch/queries/q16.sql
deleted file mode 100644
index 36b7c07..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q16.sql
+++ /dev/null
@@ -1,30 +0,0 @@
-select
-    p_brand,
-    p_type,
-    p_size,
-    count(distinct ps_suppkey) as supplier_cnt
-from
-    partsupp,
-    part
-where
-        p_partkey = ps_partkey
-  and p_brand <> 'Brand#45'
-  and p_type not like 'MEDIUM POLISHED%'
-  and p_size in (49, 14, 23, 45, 19, 3, 36, 9)
-  and ps_suppkey not in (
-    select
-        s_suppkey
-    from
-        supplier
-    where
-            s_comment like '%Customer%Complaints%'
-)
-group by
-    p_brand,
-    p_type,
-    p_size
-order by
-    supplier_cnt desc,
-    p_brand,
-    p_type,
-    p_size;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q17.sql b/rust/ballista/rust/benchmarks/tpch/queries/q17.sql
deleted file mode 100644
index 1e65550..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q17.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-select
-        sum(l_extendedprice) / 7.0 as avg_yearly
-from
-    lineitem,
-    part
-where
-        p_partkey = l_partkey
-  and p_brand = 'Brand#23'
-  and p_container = 'MED BOX'
-  and l_quantity < (
-    select
-            0.2 * avg(l_quantity)
-    from
-        lineitem
-    where
-            l_partkey = p_partkey
-);
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q18.sql b/rust/ballista/rust/benchmarks/tpch/queries/q18.sql
deleted file mode 100644
index 835de28..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q18.sql
+++ /dev/null
@@ -1,32 +0,0 @@
-select
-    c_name,
-    c_custkey,
-    o_orderkey,
-    o_orderdate,
-    o_totalprice,
-    sum(l_quantity)
-from
-    customer,
-    orders,
-    lineitem
-where
-        o_orderkey in (
-        select
-            l_orderkey
-        from
-            lineitem
-        group by
-            l_orderkey having
-                sum(l_quantity) > 300
-    )
-  and c_custkey = o_custkey
-  and o_orderkey = l_orderkey
-group by
-    c_name,
-    c_custkey,
-    o_orderkey,
-    o_orderdate,
-    o_totalprice
-order by
-    o_totalprice desc,
-    o_orderdate;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q19.sql b/rust/ballista/rust/benchmarks/tpch/queries/q19.sql
deleted file mode 100644
index 56668e7..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q19.sql
+++ /dev/null
@@ -1,35 +0,0 @@
-select
-    sum(l_extendedprice* (1 - l_discount)) as revenue
-from
-    lineitem,
-    part
-where
-    (
-                p_partkey = l_partkey
-            and p_brand = 'Brand#12'
-            and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')
-            and l_quantity >= 1 and l_quantity <= 1 + 10
-            and p_size between 1 and 5
-            and l_shipmode in ('AIR', 'AIR REG')
-            and l_shipinstruct = 'DELIVER IN PERSON'
-        )
-   or
-    (
-                p_partkey = l_partkey
-            and p_brand = 'Brand#23'
-            and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')
-            and l_quantity >= 10 and l_quantity <= 10 + 10
-            and p_size between 1 and 10
-            and l_shipmode in ('AIR', 'AIR REG')
-            and l_shipinstruct = 'DELIVER IN PERSON'
-        )
-   or
-    (
-                p_partkey = l_partkey
-            and p_brand = 'Brand#34'
-            and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')
-            and l_quantity >= 20 and l_quantity <= 20 + 10
-            and p_size between 1 and 15
-            and l_shipmode in ('AIR', 'AIR REG')
-            and l_shipinstruct = 'DELIVER IN PERSON'
-        );
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q2.sql b/rust/ballista/rust/benchmarks/tpch/queries/q2.sql
deleted file mode 100644
index f66af21..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q2.sql
+++ /dev/null
@@ -1,43 +0,0 @@
-select
-    s_acctbal,
-    s_name,
-    n_name,
-    p_partkey,
-    p_mfgr,
-    s_address,
-    s_phone,
-    s_comment
-from
-    part,
-    supplier,
-    partsupp,
-    nation,
-    region
-where
-        p_partkey = ps_partkey
-  and s_suppkey = ps_suppkey
-  and p_size = 15
-  and p_type like '%BRASS'
-  and s_nationkey = n_nationkey
-  and n_regionkey = r_regionkey
-  and r_name = 'EUROPE'
-  and ps_supplycost = (
-    select
-        min(ps_supplycost)
-    from
-        partsupp,
-        supplier,
-        nation,
-        region
-    where
-            p_partkey = ps_partkey
-      and s_suppkey = ps_suppkey
-      and s_nationkey = n_nationkey
-      and n_regionkey = r_regionkey
-      and r_name = 'EUROPE'
-)
-order by
-    s_acctbal desc,
-    n_name,
-    s_name,
-    p_partkey;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q20.sql b/rust/ballista/rust/benchmarks/tpch/queries/q20.sql
deleted file mode 100644
index f0339a6..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q20.sql
+++ /dev/null
@@ -1,37 +0,0 @@
-select
-    s_name,
-    s_address
-from
-    supplier,
-    nation
-where
-        s_suppkey in (
-        select
-            ps_suppkey
-        from
-            partsupp
-        where
-                ps_partkey in (
-                select
-                    p_partkey
-                from
-                    part
-                where
-                        p_name like 'forest%'
-            )
-          and ps_availqty > (
-            select
-                    0.5 * sum(l_quantity)
-            from
-                lineitem
-            where
-                    l_partkey = ps_partkey
-              and l_suppkey = ps_suppkey
-              and l_shipdate >= date '1994-01-01'
-              and l_shipdate < date '1994-01-01' + interval '1' year
-        )
-    )
-  and s_nationkey = n_nationkey
-  and n_name = 'CANADA'
-order by
-    s_name;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q21.sql b/rust/ballista/rust/benchmarks/tpch/queries/q21.sql
deleted file mode 100644
index 9d2fe32..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q21.sql
+++ /dev/null
@@ -1,39 +0,0 @@
-select
-    s_name,
-    count(*) as numwait
-from
-    supplier,
-    lineitem l1,
-    orders,
-    nation
-where
-        s_suppkey = l1.l_suppkey
-  and o_orderkey = l1.l_orderkey
-  and o_orderstatus = 'F'
-  and l1.l_receiptdate > l1.l_commitdate
-  and exists (
-        select
-            *
-        from
-            lineitem l2
-        where
-                l2.l_orderkey = l1.l_orderkey
-          and l2.l_suppkey <> l1.l_suppkey
-    )
-  and not exists (
-        select
-            *
-        from
-            lineitem l3
-        where
-                l3.l_orderkey = l1.l_orderkey
-          and l3.l_suppkey <> l1.l_suppkey
-          and l3.l_receiptdate > l3.l_commitdate
-    )
-  and s_nationkey = n_nationkey
-  and n_name = 'SAUDI ARABIA'
-group by
-    s_name
-order by
-    numwait desc,
-    s_name;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q22.sql b/rust/ballista/rust/benchmarks/tpch/queries/q22.sql
deleted file mode 100644
index 90aea6f..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q22.sql
+++ /dev/null
@@ -1,37 +0,0 @@
-select
-    cntrycode,
-    count(*) as numcust,
-    sum(c_acctbal) as totacctbal
-from
-    (
-        select
-            substring(c_phone from 1 for 2) as cntrycode,
-            c_acctbal
-        from
-            customer
-        where
-                substring(c_phone from 1 for 2) in
-                ('13', '31', '23', '29', '30', '18', '17')
-          and c_acctbal > (
-            select
-                avg(c_acctbal)
-            from
-                customer
-            where
-                    c_acctbal > 0.00
-              and substring(c_phone from 1 for 2) in
-                  ('13', '31', '23', '29', '30', '18', '17')
-        )
-          and not exists (
-                select
-                    *
-                from
-                    orders
-                where
-                        o_custkey = c_custkey
-            )
-    ) as custsale
-group by
-    cntrycode
-order by
-    cntrycode;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q3.sql b/rust/ballista/rust/benchmarks/tpch/queries/q3.sql
deleted file mode 100644
index 7dbc6d9..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q3.sql
+++ /dev/null
@@ -1,22 +0,0 @@
-select
-    l_orderkey,
-    sum(l_extendedprice * (1 - l_discount)) as revenue,
-    o_orderdate,
-    o_shippriority
-from
-    customer,
-    orders,
-    lineitem
-where
-        c_mktsegment = 'BUILDING'
-  and c_custkey = o_custkey
-  and l_orderkey = o_orderkey
-  and o_orderdate < date '1995-03-15'
-  and l_shipdate > date '1995-03-15'
-group by
-    l_orderkey,
-    o_orderdate,
-    o_shippriority
-order by
-    revenue desc,
-    o_orderdate;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q4.sql b/rust/ballista/rust/benchmarks/tpch/queries/q4.sql
deleted file mode 100644
index 74a620d..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q4.sql
+++ /dev/null
@@ -1,21 +0,0 @@
-select
-    o_orderpriority,
-    count(*) as order_count
-from
-    orders
-where
-        o_orderdate >= date '1993-07-01'
-  and o_orderdate < date '1993-07-01' + interval '3' month
-  and exists (
-        select
-            *
-        from
-            lineitem
-        where
-                l_orderkey = o_orderkey
-          and l_commitdate < l_receiptdate
-    )
-group by
-    o_orderpriority
-order by
-    o_orderpriority;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q5.sql b/rust/ballista/rust/benchmarks/tpch/queries/q5.sql
deleted file mode 100644
index 5a336b2..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q5.sql
+++ /dev/null
@@ -1,24 +0,0 @@
-select
-    n_name,
-    sum(l_extendedprice * (1 - l_discount)) as revenue
-from
-    customer,
-    orders,
-    lineitem,
-    supplier,
-    nation,
-    region
-where
-        c_custkey = o_custkey
-  and l_orderkey = o_orderkey
-  and l_suppkey = s_suppkey
-  and c_nationkey = s_nationkey
-  and s_nationkey = n_nationkey
-  and n_regionkey = r_regionkey
-  and r_name = 'ASIA'
-  and o_orderdate >= date '1994-01-01'
-  and o_orderdate < date '1995-01-01'
-group by
-    n_name
-order by
-    revenue desc;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q6.sql b/rust/ballista/rust/benchmarks/tpch/queries/q6.sql
deleted file mode 100644
index 5806f98..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q6.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-select
-    sum(l_extendedprice * l_discount) as revenue
-from
-    lineitem
-where
-        l_shipdate >= date '1994-01-01'
-  and l_shipdate < date '1995-01-01'
-  and l_discount between 0.06 - 0.01 and 0.06 + 0.01
-  and l_quantity < 24;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q7.sql b/rust/ballista/rust/benchmarks/tpch/queries/q7.sql
deleted file mode 100644
index d53877c..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q7.sql
+++ /dev/null
@@ -1,39 +0,0 @@
-select
-    supp_nation,
-    cust_nation,
-    l_year,
-    sum(volume) as revenue
-from
-    (
-        select
-            n1.n_name as supp_nation,
-            n2.n_name as cust_nation,
-            extract(year from l_shipdate) as l_year,
-            l_extendedprice * (1 - l_discount) as volume
-        from
-            supplier,
-            lineitem,
-            orders,
-            customer,
-            nation n1,
-            nation n2
-        where
-                s_suppkey = l_suppkey
-          and o_orderkey = l_orderkey
-          and c_custkey = o_custkey
-          and s_nationkey = n1.n_nationkey
-          and c_nationkey = n2.n_nationkey
-          and (
-                (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY')
-                or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE')
-            )
-          and l_shipdate between date '1995-01-01' and date '1996-12-31'
-    ) as shipping
-group by
-    supp_nation,
-    cust_nation,
-    l_year
-order by
-    supp_nation,
-    cust_nation,
-    l_year;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q8.sql b/rust/ballista/rust/benchmarks/tpch/queries/q8.sql
deleted file mode 100644
index 6ddb2a6..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q8.sql
+++ /dev/null
@@ -1,37 +0,0 @@
-select
-    o_year,
-    sum(case
-            when nation = 'BRAZIL' then volume
-            else 0
-        end) / sum(volume) as mkt_share
-from
-    (
-        select
-            extract(year from o_orderdate) as o_year,
-            l_extendedprice * (1 - l_discount) as volume,
-            n2.n_name as nation
-        from
-            part,
-            supplier,
-            lineitem,
-            orders,
-            customer,
-            nation n1,
-            nation n2,
-            region
-        where
-                p_partkey = l_partkey
-          and s_suppkey = l_suppkey
-          and l_orderkey = o_orderkey
-          and o_custkey = c_custkey
-          and c_nationkey = n1.n_nationkey
-          and n1.n_regionkey = r_regionkey
-          and r_name = 'AMERICA'
-          and s_nationkey = n2.n_nationkey
-          and o_orderdate between date '1995-01-01' and date '1996-12-31'
-          and p_type = 'ECONOMY ANODIZED STEEL'
-    ) as all_nations
-group by
-    o_year
-order by
-    o_year;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/queries/q9.sql b/rust/ballista/rust/benchmarks/tpch/queries/q9.sql
deleted file mode 100644
index 587bbc8..0000000
--- a/rust/ballista/rust/benchmarks/tpch/queries/q9.sql
+++ /dev/null
@@ -1,32 +0,0 @@
-select
-    nation,
-    o_year,
-    sum(amount) as sum_profit
-from
-    (
-        select
-            n_name as nation,
-            extract(year from o_orderdate) as o_year,
-            l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
-        from
-            part,
-            supplier,
-            lineitem,
-            partsupp,
-            orders,
-            nation
-        where
-                s_suppkey = l_suppkey
-          and ps_suppkey = l_suppkey
-          and ps_partkey = l_partkey
-          and p_partkey = l_partkey
-          and o_orderkey = l_orderkey
-          and s_nationkey = n_nationkey
-          and p_name like '%green%'
-    ) as profit
-group by
-    nation,
-    o_year
-order by
-    nation,
-    o_year desc;
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/run.sh b/rust/ballista/rust/benchmarks/tpch/run.sh
deleted file mode 100755
index c8a36b6..0000000
--- a/rust/ballista/rust/benchmarks/tpch/run.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-set -e
-
-# This bash script is meant to be run inside the docker-compose environment. Check the README for instructions
-
-for query in 1 3 5 6 10 12
-do
-  /tpch benchmark --host ballista-scheduler --port 50050 --query $query --path /data --format tbl --iterations 1 --debug
-done
diff --git a/rust/ballista/rust/benchmarks/tpch/src/main.rs b/rust/ballista/rust/benchmarks/tpch/src/main.rs
deleted file mode 100644
index 1ba46ea..0000000
--- a/rust/ballista/rust/benchmarks/tpch/src/main.rs
+++ /dev/null
@@ -1,360 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Benchmark derived from TPC-H. This is not an official TPC-H benchmark.
-//!
-//! This is a modified version of the DataFusion version of these benchmarks.
-
-use std::collections::HashMap;
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::time::Instant;
-
-use arrow::datatypes::{DataType, Field, Schema};
-use arrow::util::pretty;
-use ballista::prelude::*;
-use datafusion::prelude::*;
-use parquet::basic::Compression;
-use parquet::file::properties::WriterProperties;
-use structopt::StructOpt;
-
-#[derive(Debug, StructOpt)]
-struct BenchmarkOpt {
-    /// Ballista executor host
-    #[structopt(long = "host")]
-    host: String,
-
-    /// Ballista executor port
-    #[structopt(long = "port")]
-    port: u16,
-
-    /// Query number
-    #[structopt(long)]
-    query: usize,
-
-    /// Activate debug mode to see query results
-    #[structopt(long)]
-    debug: bool,
-
-    /// Number of iterations of each test run
-    #[structopt(long = "iterations", default_value = "1")]
-    iterations: usize,
-
-    /// Batch size when reading CSV or Parquet files
-    #[structopt(long = "batch-size", default_value = "32768")]
-    batch_size: usize,
-
-    /// Path to data files
-    #[structopt(parse(from_os_str), required = true, long = "path")]
-    path: PathBuf,
-
-    /// File format: `csv`, `tbl` or `parquet`
-    #[structopt(long = "format")]
-    file_format: String,
-}
-
-#[derive(Debug, StructOpt)]
-struct ConvertOpt {
-    /// Path to csv files
-    #[structopt(parse(from_os_str), required = true, short = "i", long = "input")]
-    input_path: PathBuf,
-
-    /// Output path
-    #[structopt(parse(from_os_str), required = true, short = "o", long = "output")]
-    output_path: PathBuf,
-
-    /// Output file format: `csv` or `parquet`
-    #[structopt(short = "f", long = "format")]
-    file_format: String,
-
-    /// Compression to use when writing Parquet files
-    #[structopt(short = "c", long = "compression", default_value = "snappy")]
-    compression: String,
-
-    /// Number of partitions to produce
-    #[structopt(short = "p", long = "partitions", default_value = "1")]
-    partitions: usize,
-
-    /// Batch size when reading CSV or Parquet files
-    #[structopt(short = "s", long = "batch-size", default_value = "4096")]
-    batch_size: usize,
-}
-
-#[derive(Debug, StructOpt)]
-#[structopt(name = "TPC-H", about = "TPC-H Benchmarks.")]
-enum TpchOpt {
-    Benchmark(BenchmarkOpt),
-    Convert(ConvertOpt),
-}
-
-const TABLES: &[&str] = &[
-    "part", "supplier", "partsupp", "customer", "orders", "lineitem", "nation", "region",
-];
-
-#[tokio::main]
-async fn main() -> Result<()> {
-    env_logger::init();
-    match TpchOpt::from_args() {
-        TpchOpt::Benchmark(opt) => benchmark(opt).await.map(|_| ()),
-        TpchOpt::Convert(opt) => convert_tbl(opt).await,
-    }
-}
-
-async fn benchmark(opt: BenchmarkOpt) -> Result<()> {
-    println!("Running benchmarks with the following options: {:?}", opt);
-
-    let mut settings = HashMap::new();
-    settings.insert("batch.size".to_owned(), format!("{}", opt.batch_size));
-
-    let ctx = BallistaContext::remote(opt.host.as_str(), opt.port, settings);
-
-    // register tables with Ballista context
-    let path = opt.path.to_str().unwrap();
-    let file_format = opt.file_format.as_str();
-    for table in TABLES {
-        match file_format {
-            // dbgen creates .tbl ('|' delimited) files without header
-            "tbl" => {
-                let path = format!("{}/{}.tbl", path, table);
-                let schema = get_schema(table);
-                let options = CsvReadOptions::new()
-                    .schema(&schema)
-                    .delimiter(b'|')
-                    .has_header(false)
-                    .file_extension(".tbl");
-                ctx.register_csv(table, &path, options)?;
-            }
-            "csv" => {
-                let path = format!("{}/{}", path, table);
-                let schema = get_schema(table);
-                let options = CsvReadOptions::new().schema(&schema).has_header(true);
-                ctx.register_csv(table, &path, options)?;
-            }
-            "parquet" => {
-                let path = format!("{}/{}", path, table);
-                ctx.register_parquet(table, &path)?;
-            }
-            other => {
-                unimplemented!("Invalid file format '{}'", other);
-            }
-        }
-    }
-
-    let mut millis = vec![];
-
-    // run benchmark
-    let sql = get_query_sql(opt.query)?;
-    println!("Running benchmark with query {}:\n {}", opt.query, sql);
-    for i in 0..opt.iterations {
-        let start = Instant::now();
-        let df = ctx.sql(&sql)?;
-        let mut batches = vec![];
-        let mut stream = df.collect().await?;
-        while let Some(result) = stream.next().await {
-            let batch = result?;
-            batches.push(batch);
-        }
-        let elapsed = start.elapsed().as_secs_f64() * 1000.0;
-        millis.push(elapsed);
-        println!("Query {} iteration {} took {:.1} ms", opt.query, i, elapsed);
-        if opt.debug {
-            pretty::print_batches(&batches)?;
-        }
-    }
-
-    let avg = millis.iter().sum::<f64>() / millis.len() as f64;
-    println!("Query {} avg time: {:.2} ms", opt.query, avg);
-
-    Ok(())
-}
-
-fn get_query_sql(query: usize) -> Result<String> {
-    if query > 0 && query < 23 {
-        let filename = format!("queries/q{}.sql", query);
-        Ok(fs::read_to_string(&filename).expect("failed to read query"))
-    } else {
-        Err(BallistaError::General(
-            "invalid query. Expected value between 1 and 22".to_owned(),
-        ))
-    }
-}
-
-async fn convert_tbl(opt: ConvertOpt) -> Result<()> {
-    let output_root_path = Path::new(&opt.output_path);
-    for table in TABLES {
-        let start = Instant::now();
-        let schema = get_schema(table);
-
-        let input_path = format!("{}/{}.tbl", opt.input_path.to_str().unwrap(), table);
-        let options = CsvReadOptions::new()
-            .schema(&schema)
-            .delimiter(b'|')
-            .file_extension(".tbl");
-
-        let config = ExecutionConfig::new().with_batch_size(opt.batch_size);
-        let mut ctx = ExecutionContext::with_config(config);
-
-        // build plan to read the TBL file
-        let mut csv = ctx.read_csv(&input_path, options)?;
-
-        // optionally, repartition the file
-        if opt.partitions > 1 {
-            csv = csv.repartition(Partitioning::RoundRobinBatch(opt.partitions))?
-        }
-
-        // create the physical plan
-        let csv = csv.to_logical_plan();
-        let csv = ctx.optimize(&csv)?;
-        let csv = ctx.create_physical_plan(&csv)?;
-
-        let output_path = output_root_path.join(table);
-        let output_path = output_path.to_str().unwrap().to_owned();
-
-        println!(
-            "Converting '{}' to {} files in directory '{}'",
-            &input_path, &opt.file_format, &output_path
-        );
-        match opt.file_format.as_str() {
-            "csv" => ctx.write_csv(csv, output_path).await?,
-            "parquet" => {
-                let compression = match opt.compression.as_str() {
-                    "none" => Compression::UNCOMPRESSED,
-                    "snappy" => Compression::SNAPPY,
-                    "brotli" => Compression::BROTLI,
-                    "gzip" => Compression::GZIP,
-                    "lz4" => Compression::LZ4,
-                    "lz0" => Compression::LZO,
-                    "zstd" => Compression::ZSTD,
-                    other => {
-                        return Err(BallistaError::NotImplemented(format!(
-                            "Invalid compression format: {}",
-                            other
-                        )))
-                    }
-                };
-                let props = WriterProperties::builder()
-                    .set_compression(compression)
-                    .build();
-                ctx.write_parquet(csv, output_path, Some(props)).await?
-            }
-            other => {
-                return Err(BallistaError::NotImplemented(format!(
-                    "Invalid output format: {}",
-                    other
-                )))
-            }
-        }
-        println!("Conversion completed in {} ms", start.elapsed().as_millis());
-    }
-
-    Ok(())
-}
-
-fn get_schema(table: &str) -> Schema {
-    // note that the schema intentionally uses signed integers so that any generated Parquet
-    // files can also be used to benchmark tools that only support signed integers, such as
-    // Apache Spark
-
-    match table {
-        "part" => Schema::new(vec![
-            Field::new("p_partkey", DataType::Int32, false),
-            Field::new("p_name", DataType::Utf8, false),
-            Field::new("p_mfgr", DataType::Utf8, false),
-            Field::new("p_brand", DataType::Utf8, false),
-            Field::new("p_type", DataType::Utf8, false),
-            Field::new("p_size", DataType::Int32, false),
-            Field::new("p_container", DataType::Utf8, false),
-            Field::new("p_retailprice", DataType::Float64, false),
-            Field::new("p_comment", DataType::Utf8, false),
-        ]),
-
-        "supplier" => Schema::new(vec![
-            Field::new("s_suppkey", DataType::Int32, false),
-            Field::new("s_name", DataType::Utf8, false),
-            Field::new("s_address", DataType::Utf8, false),
-            Field::new("s_nationkey", DataType::Int32, false),
-            Field::new("s_phone", DataType::Utf8, false),
-            Field::new("s_acctbal", DataType::Float64, false),
-            Field::new("s_comment", DataType::Utf8, false),
-        ]),
-
-        "partsupp" => Schema::new(vec![
-            Field::new("ps_partkey", DataType::Int32, false),
-            Field::new("ps_suppkey", DataType::Int32, false),
-            Field::new("ps_availqty", DataType::Int32, false),
-            Field::new("ps_supplycost", DataType::Float64, false),
-            Field::new("ps_comment", DataType::Utf8, false),
-        ]),
-
-        "customer" => Schema::new(vec![
-            Field::new("c_custkey", DataType::Int32, false),
-            Field::new("c_name", DataType::Utf8, false),
-            Field::new("c_address", DataType::Utf8, false),
-            Field::new("c_nationkey", DataType::Int32, false),
-            Field::new("c_phone", DataType::Utf8, false),
-            Field::new("c_acctbal", DataType::Float64, false),
-            Field::new("c_mktsegment", DataType::Utf8, false),
-            Field::new("c_comment", DataType::Utf8, false),
-        ]),
-
-        "orders" => Schema::new(vec![
-            Field::new("o_orderkey", DataType::Int32, false),
-            Field::new("o_custkey", DataType::Int32, false),
-            Field::new("o_orderstatus", DataType::Utf8, false),
-            Field::new("o_totalprice", DataType::Float64, false),
-            Field::new("o_orderdate", DataType::Date32, false),
-            Field::new("o_orderpriority", DataType::Utf8, false),
-            Field::new("o_clerk", DataType::Utf8, false),
-            Field::new("o_shippriority", DataType::Int32, false),
-            Field::new("o_comment", DataType::Utf8, false),
-        ]),
-
-        "lineitem" => Schema::new(vec![
-            Field::new("l_orderkey", DataType::Int32, false),
-            Field::new("l_partkey", DataType::Int32, false),
-            Field::new("l_suppkey", DataType::Int32, false),
-            Field::new("l_linenumber", DataType::Int32, false),
-            Field::new("l_quantity", DataType::Float64, false),
-            Field::new("l_extendedprice", DataType::Float64, false),
-            Field::new("l_discount", DataType::Float64, false),
-            Field::new("l_tax", DataType::Float64, false),
-            Field::new("l_returnflag", DataType::Utf8, false),
-            Field::new("l_linestatus", DataType::Utf8, false),
-            Field::new("l_shipdate", DataType::Date32, false),
-            Field::new("l_commitdate", DataType::Date32, false),
-            Field::new("l_receiptdate", DataType::Date32, false),
-            Field::new("l_shipinstruct", DataType::Utf8, false),
-            Field::new("l_shipmode", DataType::Utf8, false),
-            Field::new("l_comment", DataType::Utf8, false),
-        ]),
-
-        "nation" => Schema::new(vec![
-            Field::new("n_nationkey", DataType::Int32, false),
-            Field::new("n_name", DataType::Utf8, false),
-            Field::new("n_regionkey", DataType::Int32, false),
-            Field::new("n_comment", DataType::Utf8, false),
-        ]),
-
-        "region" => Schema::new(vec![
-            Field::new("r_regionkey", DataType::Int32, false),
-            Field::new("r_name", DataType::Utf8, false),
-            Field::new("r_comment", DataType::Utf8, false),
-        ]),
-
-        _ => unimplemented!(),
-    }
-}
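The harness above registers dbgen's `.tbl` output as headerless, pipe-delimited CSV before running each query. A minimal sketch of the same registration against a plain DataFusion context (the path is a placeholder and the schema is truncated; reading real `.tbl` files needs every column from `get_schema` above):

    use arrow::datatypes::{DataType, Field, Schema};
    use datafusion::prelude::*;

    fn register_lineitem(ctx: &mut ExecutionContext) -> datafusion::error::Result<()> {
        // Truncated for brevity; the real file requires all 16 lineitem columns.
        let schema = Schema::new(vec![
            Field::new("l_orderkey", DataType::Int32, false),
            Field::new("l_quantity", DataType::Float64, false),
        ]);
        let options = CsvReadOptions::new()
            .schema(&schema)
            .delimiter(b'|')         // dbgen emits '|'-delimited rows
            .has_header(false)       // .tbl files carry no header row
            .file_extension(".tbl");
        ctx.register_csv("lineitem", "/data/lineitem.tbl", options)
    }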
diff --git a/rust/ballista/rust/benchmarks/tpch/tpch-gen.sh b/rust/ballista/rust/benchmarks/tpch/tpch-gen.sh
deleted file mode 100755
index f5147f5..0000000
--- a/rust/ballista/rust/benchmarks/tpch/tpch-gen.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-BALLISTA_VERSION=0.4.2-SNAPSHOT
-
-#set -e
-
-docker build -t ballistacompute/ballista-tpchgen:$BALLISTA_VERSION -f tpchgen.dockerfile .
-
-# Generate data into the ./data directory if it does not already exist
-FILE=./data/supplier.tbl
-if test -f "$FILE"; then
-    echo "$FILE exists."
-else
-  mkdir data 2>/dev/null
-  docker run -v `pwd`/data:/data -it --rm ballistacompute/ballista-tpchgen:$BALLISTA_VERSION
-  ls -l data
-fi
\ No newline at end of file
diff --git a/rust/ballista/rust/benchmarks/tpch/tpchgen.dockerfile b/rust/ballista/rust/benchmarks/tpch/tpchgen.dockerfile
deleted file mode 100644
index 7fc2e50..0000000
--- a/rust/ballista/rust/benchmarks/tpch/tpchgen.dockerfile
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM ubuntu
-
-RUN apt-get update && \
-    apt-get install -y git build-essential
-
-RUN git clone https://github.com/databricks/tpch-dbgen.git && \
-    cd tpch-dbgen && \
-    make
-
-WORKDIR /tpch-dbgen
-ADD entrypoint.sh /tpch-dbgen/
-
-VOLUME data
-
-ENTRYPOINT [ "bash", "./entrypoint.sh" ]
diff --git a/rust/ballista/rust/client/Cargo.toml b/rust/ballista/rust/client/Cargo.toml
deleted file mode 100644
index de3effe..0000000
--- a/rust/ballista/rust/client/Cargo.toml
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[package]
-name = "ballista"
-description = "Ballista Distributed Compute"
-license = "Apache-2.0"
-version = "0.4.2-SNAPSHOT"
-homepage = "https://github.com/apache/arrow"
-repository = "https://github.com/apache/arrow"
-authors = ["Apache Arrow <de...@arrow.apache.org>"]
-edition = "2018"
-
-[dependencies]
-ballista-core = { path = "../core" }
-futures = "0.3"
-log = "0.4"
-tokio = "1.0"
-
-arrow = { path = "../../../arrow"  }
-datafusion = { path = "../../../datafusion" }
\ No newline at end of file
diff --git a/rust/ballista/rust/client/README.md b/rust/ballista/rust/client/README.md
deleted file mode 100644
index 00bf3ea..0000000
--- a/rust/ballista/rust/client/README.md
+++ /dev/null
@@ -1,22 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Ballista - Rust
-This crate contains the Ballista client library. For an example usage, please refer [here](../benchmarks/tpch/README.md).
-
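The benchmark README referenced above is also removed in this change, so for reference, a minimal usage sketch based on the client APIs in the deleted `context.rs` below (scheduler endpoint, table name, and data path are placeholders):

    use std::collections::HashMap;

    use ballista::prelude::*; // BallistaContext, Result, StreamExt
    use datafusion::physical_plan::csv::CsvReadOptions;

    async fn example() -> Result<()> {
        // Connect to a remote Ballista scheduler (host and port are placeholders).
        let ctx = BallistaContext::remote("localhost", 50050, HashMap::new());

        // Register a CSV file and query it with SQL.
        ctx.register_csv("trips", "/data/trips.csv", CsvReadOptions::new())?;
        let df = ctx.sql("SELECT count(*) FROM trips")?;

        // `collect` submits the plan to the cluster and streams back record batches.
        let mut stream = df.collect().await?;
        while let Some(batch) = stream.next().await {
            println!("got a batch with {} rows", batch?.num_rows());
        }
        Ok(())
    }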
diff --git a/rust/ballista/rust/client/src/columnar_batch.rs b/rust/ballista/rust/client/src/columnar_batch.rs
deleted file mode 100644
index d3ff886..0000000
--- a/rust/ballista/rust/client/src/columnar_batch.rs
+++ /dev/null
@@ -1,167 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-use std::{collections::HashMap, sync::Arc};
-
-use ballista_core::error::{ballista_error, Result};
-
-use arrow::{
-    array::ArrayRef,
-    datatypes::{DataType, Schema},
-    record_batch::RecordBatch,
-};
-use datafusion::scalar::ScalarValue;
-
-pub type MaybeColumnarBatch = Result<Option<ColumnarBatch>>;
-
-/// Batch of columnar data.
-#[allow(dead_code)]
-#[derive(Debug, Clone)]
-pub struct ColumnarBatch {
-    schema: Arc<Schema>,
-    columns: HashMap<String, ColumnarValue>,
-}
-
-impl ColumnarBatch {
-    pub fn from_arrow(batch: &RecordBatch) -> Self {
-        let columns = batch
-            .columns()
-            .iter()
-            .enumerate()
-            .map(|(i, array)| {
-                (
-                    batch.schema().field(i).name().clone(),
-                    ColumnarValue::Columnar(array.clone()),
-                )
-            })
-            .collect();
-
-        Self {
-            schema: batch.schema(),
-            columns,
-        }
-    }
-
-    pub fn from_values(values: &[ColumnarValue], schema: &Schema) -> Self {
-        let columns = schema
-            .fields()
-            .iter()
-            .enumerate()
-            .map(|(i, f)| (f.name().clone(), values[i].clone()))
-            .collect();
-
-        Self {
-            schema: Arc::new(schema.clone()),
-            columns,
-        }
-    }
-
-    pub fn to_arrow(&self) -> Result<RecordBatch> {
-        let arrays = self
-            .schema
-            .fields()
-            .iter()
-            .map(|c| {
-                match self.column(c.name())? {
-                    ColumnarValue::Columnar(array) => Ok(array.clone()),
-                    ColumnarValue::Scalar(_, _) => {
-                        // note that this can be implemented easily if needed
-                        Err(ballista_error("Cannot convert scalar value to Arrow array"))
-                    }
-                }
-            })
-            .collect::<Result<Vec<_>>>()?;
-
-        Ok(RecordBatch::try_new(self.schema.clone(), arrays)?)
-    }
-
-    pub fn schema(&self) -> Arc<Schema> {
-        self.schema.clone()
-    }
-
-    pub fn num_columns(&self) -> usize {
-        self.columns.len()
-    }
-
-    pub fn num_rows(&self) -> usize {
-        self.columns[self.schema.field(0).name()].len()
-    }
-
-    pub fn column(&self, name: &str) -> Result<&ColumnarValue> {
-        Ok(&self.columns[name])
-    }
-
-    pub fn memory_size(&self) -> usize {
-        self.columns.values().map(|c| c.memory_size()).sum()
-    }
-}
-
-/// A columnar value can either be a scalar value or an Arrow array.
-#[allow(dead_code)]
-#[derive(Debug, Clone)]
-pub enum ColumnarValue {
-    Scalar(ScalarValue, usize),
-    Columnar(ArrayRef),
-}
-
-impl ColumnarValue {
-    pub fn len(&self) -> usize {
-        match self {
-            ColumnarValue::Scalar(_, n) => *n,
-            ColumnarValue::Columnar(array) => array.len(),
-        }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-
-    pub fn data_type(&self) -> &DataType {
-        match self {
-            ColumnarValue::Columnar(array) => array.data_type(),
-            ColumnarValue::Scalar(value, _) => match value {
-                ScalarValue::UInt8(_) => &DataType::UInt8,
-                ScalarValue::UInt16(_) => &DataType::UInt16,
-                ScalarValue::UInt32(_) => &DataType::UInt32,
-                ScalarValue::UInt64(_) => &DataType::UInt64,
-                ScalarValue::Int8(_) => &DataType::Int8,
-                ScalarValue::Int16(_) => &DataType::Int16,
-                ScalarValue::Int32(_) => &DataType::Int32,
-                ScalarValue::Int64(_) => &DataType::Int64,
-                ScalarValue::Float32(_) => &DataType::Float32,
-                ScalarValue::Float64(_) => &DataType::Float64,
-                _ => unimplemented!(),
-            },
-        }
-    }
-
-    pub fn to_arrow(&self) -> ArrayRef {
-        match self {
-            ColumnarValue::Columnar(array) => array.clone(),
-            ColumnarValue::Scalar(value, n) => value.to_array_of_size(*n),
-        }
-    }
-
-    pub fn memory_size(&self) -> usize {
-        match self {
-            ColumnarValue::Columnar(array) => array.get_array_memory_size(),
-            _ => 0,
-        }
-    }
-}
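The `ColumnarValue` enum above captures a scalar-versus-array duality: a scalar paired with a row count can always be materialized into a full Arrow array when an operator needs one, which is what its `to_arrow` does for the `Scalar` variant. A tiny illustration using the same DataFusion API (the function name is hypothetical):

    use arrow::array::ArrayRef;
    use datafusion::scalar::ScalarValue;

    // Expand a scalar into an array of a given length, mirroring what
    // `ColumnarValue::to_arrow` does for `ColumnarValue::Scalar(value, n)`.
    fn expand_scalar() -> ArrayRef {
        let value = ScalarValue::Int32(Some(7));
        value.to_array_of_size(4) // an Int32Array containing [7, 7, 7, 7]
    }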
diff --git a/rust/ballista/rust/client/src/context.rs b/rust/ballista/rust/client/src/context.rs
deleted file mode 100644
index 400f6b6..0000000
--- a/rust/ballista/rust/client/src/context.rs
+++ /dev/null
@@ -1,400 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Distributed execution context.
-
-use std::path::PathBuf;
-use std::pin::Pin;
-use std::sync::{Arc, Mutex};
-use std::{collections::HashMap, convert::TryInto};
-use std::{fs, time::Duration};
-
-use ballista_core::serde::protobuf::scheduler_grpc_client::SchedulerGrpcClient;
-use ballista_core::serde::protobuf::{
-    execute_query_params::Query, job_status, ExecuteQueryParams, GetJobStatusParams,
-    GetJobStatusResult,
-};
-use ballista_core::{
-    client::BallistaClient,
-    datasource::DFTableAdapter,
-    error::{BallistaError, Result},
-    memory_stream::MemoryStream,
-    utils::create_datafusion_context,
-};
-
-use arrow::datatypes::Schema;
-use datafusion::catalog::TableReference;
-use datafusion::logical_plan::{DFSchema, Expr, LogicalPlan, Partitioning};
-use datafusion::physical_plan::csv::CsvReadOptions;
-use datafusion::{dataframe::DataFrame, physical_plan::RecordBatchStream};
-use log::{error, info};
-
-#[allow(dead_code)]
-struct BallistaContextState {
-    /// Scheduler host
-    scheduler_host: String,
-    /// Scheduler port
-    scheduler_port: u16,
-    /// Tables that have been registered with this context
-    tables: HashMap<String, LogicalPlan>,
-    /// General purpose settings
-    settings: HashMap<String, String>,
-}
-
-impl BallistaContextState {
-    pub fn new(
-        scheduler_host: String,
-        scheduler_port: u16,
-        settings: HashMap<String, String>,
-    ) -> Self {
-        Self {
-            scheduler_host,
-            scheduler_port,
-            tables: HashMap::new(),
-            settings,
-        }
-    }
-}
-
-#[allow(dead_code)]
-pub struct BallistaContext {
-    state: Arc<Mutex<BallistaContextState>>,
-}
-
-impl BallistaContext {
-    /// Create a context for executing queries against a remote Ballista scheduler instance
-    pub fn remote(host: &str, port: u16, settings: HashMap<String, String>) -> Self {
-        let state = BallistaContextState::new(host.to_owned(), port, settings);
-
-        Self {
-            state: Arc::new(Mutex::new(state)),
-        }
-    }
-
-    /// Create a DataFrame representing a Parquet table scan
-    pub fn read_parquet(&self, path: &str) -> Result<BallistaDataFrame> {
-        // convert to absolute path because the executor likely has a different working directory
-        let path = PathBuf::from(path);
-        let path = fs::canonicalize(&path)?;
-
-        // use local DataFusion context for now but later this might call the scheduler
-        let mut ctx = create_datafusion_context();
-        let df = ctx.read_parquet(path.to_str().unwrap())?;
-        Ok(BallistaDataFrame::from(self.state.clone(), df))
-    }
-
-    /// Create a DataFrame representing a CSV table scan
-    pub fn read_csv(
-        &self,
-        path: &str,
-        options: CsvReadOptions,
-    ) -> Result<BallistaDataFrame> {
-        // convert to absolute path because the executor likely has a different working directory
-        let path = PathBuf::from(path);
-        let path = fs::canonicalize(&path)?;
-
-        // use local DataFusion context for now but later this might call the scheduler
-        let mut ctx = create_datafusion_context();
-        let df = ctx.read_csv(path.to_str().unwrap(), options)?;
-        Ok(BallistaDataFrame::from(self.state.clone(), df))
-    }
-
-    /// Register a DataFrame as a table that can be referenced from a SQL query
-    pub fn register_table(&self, name: &str, table: &BallistaDataFrame) -> Result<()> {
-        let mut state = self.state.lock().unwrap();
-        state
-            .tables
-            .insert(name.to_owned(), table.to_logical_plan());
-        Ok(())
-    }
-
-    pub fn register_csv(
-        &self,
-        name: &str,
-        path: &str,
-        options: CsvReadOptions,
-    ) -> Result<()> {
-        let df = self.read_csv(path, options)?;
-        self.register_table(name, &df)
-    }
-
-    pub fn register_parquet(&self, name: &str, path: &str) -> Result<()> {
-        let df = self.read_parquet(path)?;
-        self.register_table(name, &df)
-    }
-
-    /// Create a DataFrame from a SQL statement
-    pub fn sql(&self, sql: &str) -> Result<BallistaDataFrame> {
-        // use local DataFusion context for now but later this might call the scheduler
-        let mut ctx = create_datafusion_context();
-        // register tables
-        let state = self.state.lock().unwrap();
-        for (name, plan) in &state.tables {
-            let plan = ctx.optimize(plan)?;
-            let execution_plan = ctx.create_physical_plan(&plan)?;
-            ctx.register_table(
-                TableReference::Bare { table: name },
-                Arc::new(DFTableAdapter::new(plan, execution_plan)),
-            )?;
-        }
-        let df = ctx.sql(sql)?;
-        Ok(BallistaDataFrame::from(self.state.clone(), df))
-    }
-}
-
-/// The Ballista DataFrame is a wrapper around the DataFusion DataFrame and overrides the
-/// `collect` method so that the query is executed against Ballista and not DataFusion.
-pub struct BallistaDataFrame {
-    /// Ballista context state
-    state: Arc<Mutex<BallistaContextState>>,
-    /// DataFusion DataFrame representing logical query plan
-    df: Arc<dyn DataFrame>,
-}
-
-impl BallistaDataFrame {
-    fn from(state: Arc<Mutex<BallistaContextState>>, df: Arc<dyn DataFrame>) -> Self {
-        Self { state, df }
-    }
-
-    pub async fn collect(&self) -> Result<Pin<Box<dyn RecordBatchStream + Send + Sync>>> {
-        let scheduler_url = {
-            let state = self.state.lock().unwrap();
-
-            format!("http://{}:{}", state.scheduler_host, state.scheduler_port)
-        };
-
-        info!("Connecting to Ballista scheduler at {}", scheduler_url);
-
-        let mut scheduler = SchedulerGrpcClient::connect(scheduler_url).await?;
-
-        let plan = self.df.to_logical_plan();
-        let schema: Schema = plan.schema().as_ref().clone().into();
-
-        let job_id = scheduler
-            .execute_query(ExecuteQueryParams {
-                query: Some(Query::LogicalPlan((&plan).try_into()?)),
-            })
-            .await?
-            .into_inner()
-            .job_id;
-
-        loop {
-            let GetJobStatusResult { status } = scheduler
-                .get_job_status(GetJobStatusParams {
-                    job_id: job_id.clone(),
-                })
-                .await?
-                .into_inner();
-            let status = status.and_then(|s| s.status).ok_or_else(|| {
-                BallistaError::Internal("Received empty status message".to_owned())
-            })?;
-            let wait_future = tokio::time::sleep(Duration::from_millis(100));
-            match status {
-                job_status::Status::Queued(_) => {
-                    info!("Job {} still queued...", job_id);
-                    wait_future.await;
-                }
-                job_status::Status::Running(_) => {
-                    info!("Job {} is running...", job_id);
-                    wait_future.await;
-                }
-                job_status::Status::Failed(err) => {
-                    let msg = format!("Job {} failed: {}", job_id, err.error);
-                    error!("{}", msg);
-                    break Err(BallistaError::General(msg));
-                }
-                job_status::Status::Completed(completed) => {
-                    // TODO: use streaming. Probably need to change the signature of fetch_partition to achieve that
-                    let mut result = vec![];
-                    for location in completed.partition_location {
-                        let metadata = location.executor_meta.ok_or_else(|| {
-                            BallistaError::Internal(
-                                "Received empty executor metadata".to_owned(),
-                            )
-                        })?;
-                        let partition_id = location.partition_id.ok_or_else(|| {
-                            BallistaError::Internal(
-                                "Received empty partition id".to_owned(),
-                            )
-                        })?;
-                        let mut ballista_client = BallistaClient::try_new(
-                            metadata.host.as_str(),
-                            metadata.port as u16,
-                        )
-                        .await?;
-                        let stream = ballista_client
-                            .fetch_partition(
-                                &partition_id.job_id,
-                                partition_id.stage_id as usize,
-                                partition_id.partition_id as usize,
-                            )
-                            .await?;
-                        result.append(
-                            &mut datafusion::physical_plan::common::collect(stream)
-                                .await?,
-                        );
-                    }
-                    break Ok(Box::pin(MemoryStream::try_new(
-                        result,
-                        Arc::new(schema),
-                        None,
-                    )?));
-                }
-            };
-        }
-    }
-
-    pub fn select_columns(&self, columns: &[&str]) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df
-                .select_columns(columns)
-                .map_err(BallistaError::from)?,
-        ))
-    }
-
-    pub fn select(&self, expr: Vec<Expr>) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df.select(expr).map_err(BallistaError::from)?,
-        ))
-    }
-
-    pub fn filter(&self, expr: Expr) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df.filter(expr).map_err(BallistaError::from)?,
-        ))
-    }
-
-    pub fn aggregate(
-        &self,
-        group_expr: Vec<Expr>,
-        aggr_expr: Vec<Expr>,
-    ) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df
-                .aggregate(group_expr, aggr_expr)
-                .map_err(BallistaError::from)?,
-        ))
-    }
-
-    pub fn limit(&self, n: usize) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df.limit(n).map_err(BallistaError::from)?,
-        ))
-    }
-
-    pub fn sort(&self, expr: Vec<Expr>) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df.sort(expr).map_err(BallistaError::from)?,
-        ))
-    }
-
-    // TODO lifetime issue
-    // pub fn join(&self, right: Arc<dyn DataFrame>, join_type: JoinType, left_cols: &[&str], right_cols: &[&str]) ->
-    // Result<BallistaDataFrame> {     Ok(Self::from(self.state.clone(), self.df.join(right, join_type, &left_cols,
-    // &right_cols).map_err(BallistaError::from)?)) }
-
-    pub fn repartition(
-        &self,
-        partitioning_scheme: Partitioning,
-    ) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df
-                .repartition(partitioning_scheme)
-                .map_err(BallistaError::from)?,
-        ))
-    }
-
-    pub fn schema(&self) -> &DFSchema {
-        self.df.schema()
-    }
-
-    pub fn to_logical_plan(&self) -> LogicalPlan {
-        self.df.to_logical_plan()
-    }
-
-    pub fn explain(&self, verbose: bool) -> Result<BallistaDataFrame> {
-        Ok(Self::from(
-            self.state.clone(),
-            self.df.explain(verbose).map_err(BallistaError::from)?,
-        ))
-    }
-}
-
-// #[async_trait]
-// impl ExecutionContext for BallistaContext {
-//     async fn get_executor_ids(&self) -> Result<Vec<ExecutorMeta>> {
-//         match &self.config.discovery_mode {
-//             DiscoveryMode::Etcd => etcd_get_executors(&self.config.etcd_urls, "default").await,
-//             DiscoveryMode::Kubernetes => k8s_get_executors("default", "ballista").await,
-//             DiscoveryMode::Standalone => Err(ballista_error("Standalone mode not implemented yet")),
-//         }
-//     }
-//
-//     async fn execute_task(
-//         &self,
-//         executor_meta: ExecutorMeta,
-//         task: ExecutionTask,
-//     ) -> Result<ShuffleId> {
-//         // TODO what is the point of returning this info since it is based on input arg?
-//         let shuffle_id = ShuffleId::new(task.job_uuid, task.stage_id, task.partition_id);
-//
-//         let _ = execute_action(
-//             &executor_meta.host,
-//             executor_meta.port,
-//             &Action::Execute(task),
-//         )
-//         .await?;
-//
-//         Ok(shuffle_id)
-//     }
-//
-//     async fn read_shuffle(&self, shuffle_id: &ShuffleId) -> Result<Vec<ColumnarBatch>> {
-//         match self.shuffle_locations.get(shuffle_id) {
-//             Some(executor_meta) => {
-//                 let batches = execute_action(
-//                     &executor_meta.host,
-//                     executor_meta.port,
-//                     &Action::FetchShuffle(*shuffle_id),
-//                 )
-//                 .await?;
-//                 Ok(batches
-//                     .iter()
-//                     .map(|b| ColumnarBatch::from_arrow(b))
-//                     .collect())
-//             }
-//             _ => Err(ballista_error(&format!(
-//                 "Failed to resolve executor UUID for shuffle ID {:?}",
-//                 shuffle_id
-//             ))),
-//         }
-//     }
-//
-//     fn config(&self) -> ExecutorConfig {
-//         self.config.clone()
-//     }
-// }
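The `collect` implementation above polls the scheduler for job status roughly every 100 ms until the job completes or fails, then fetches the result partitions. A stripped-down sketch of that polling pattern (the `JobStatus` type and `status_of` callback are illustrative stand-ins for the generated protobuf types and the gRPC `get_job_status` call):

    use std::time::Duration;

    // Illustrative status type; the real code matches on generated protobuf enums.
    enum JobStatus {
        Queued,
        Running,
        Failed(String),
        Completed(Vec<String>), // partition locations, simplified to strings
    }

    async fn poll_until_done(
        mut status_of: impl FnMut() -> JobStatus,
    ) -> Result<Vec<String>, String> {
        loop {
            match status_of() {
                // Not finished yet: back off briefly, then ask the scheduler again.
                JobStatus::Queued | JobStatus::Running => {
                    tokio::time::sleep(Duration::from_millis(100)).await;
                }
                JobStatus::Failed(err) => break Err(err),
                JobStatus::Completed(locations) => break Ok(locations),
            }
        }
    }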
diff --git a/rust/ballista/rust/client/src/lib.rs b/rust/ballista/rust/client/src/lib.rs
deleted file mode 100644
index c3c6291..0000000
--- a/rust/ballista/rust/client/src/lib.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-pub mod columnar_batch;
-pub mod context;
-pub mod prelude;
diff --git a/rust/ballista/rust/client/src/prelude.rs b/rust/ballista/rust/client/src/prelude.rs
deleted file mode 100644
index 2f940ae..0000000
--- a/rust/ballista/rust/client/src/prelude.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Ballista Prelude (common imports)
-
-pub use crate::context::BallistaContext;
-pub use ballista_core::error::{BallistaError, Result};
-
-pub use futures::StreamExt;
diff --git a/rust/ballista/rust/core/Cargo.toml b/rust/ballista/rust/core/Cargo.toml
deleted file mode 100644
index e37a1ea..0000000
--- a/rust/ballista/rust/core/Cargo.toml
+++ /dev/null
@@ -1,50 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[package]
-name = "ballista-core"
-description = "Ballista Distributed Compute"
-license = "Apache-2.0"
-version = "0.4.2-SNAPSHOT"
-homepage = "https://github.com/apache/arrow"
-repository = "https://github.com/apache/arrow"
-authors = ["Apache Arrow <de...@arrow.apache.org>"]
-edition = "2018"
-build = "build.rs"
-
-[features]
-simd = ["datafusion/simd"]
-
-[dependencies]
-async-trait = "0.1.36"
-futures = "0.3"
-log = "0.4"
-prost = "0.7"
-serde = {version = "1", features = ["derive"]}
-sqlparser = "0.8"
-tokio = "1.0"
-tonic = "0.4"
-uuid = { version = "0.8", features = ["v4"] }
-
-arrow = { path = "../../../arrow"  }
-arrow-flight = { path = "../../../arrow-flight"  }
-datafusion = { path = "../../../datafusion" }
-
-[dev-dependencies]
-
-[build-dependencies]
-tonic-build = { version = "0.4" }
diff --git a/rust/ballista/rust/core/README.md b/rust/ballista/rust/core/README.md
deleted file mode 100644
index f97952b..0000000
--- a/rust/ballista/rust/core/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-<!---
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-# Ballista - Rust
-This crate contains the core Ballista types.
diff --git a/rust/ballista/rust/core/build.rs b/rust/ballista/rust/core/build.rs
deleted file mode 100644
index 6ad153e..0000000
--- a/rust/ballista/rust/core/build.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-fn main() -> Result<(), String> {
-    // for use in docker build where file changes can be wonky
-    println!("cargo:rerun-if-env-changed=FORCE_REBUILD");
-
-    println!("cargo:rerun-if-changed=proto/ballista.proto");
-    tonic_build::configure()
-        .compile(&["proto/ballista.proto"], &["proto"])
-        .map_err(|e| format!("protobuf compilation failed: {}", e))
-}
diff --git a/rust/ballista/rust/core/proto/ballista.proto b/rust/ballista/rust/core/proto/ballista.proto
deleted file mode 100644
index 5733921..0000000
--- a/rust/ballista/rust/core/proto/ballista.proto
+++ /dev/null
@@ -1,824 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-syntax = "proto3";
-
-package ballista.protobuf;
-
-option java_multiple_files = true;
-option java_package = "org.ballistacompute.protobuf";
-option java_outer_classname = "BallistaProto";
-
-///////////////////////////////////////////////////////////////////////////////////////////////////
-// Ballista Logical Plan
-///////////////////////////////////////////////////////////////////////////////////////////////////
-
-// logical expressions
-message LogicalExprNode {
-  oneof ExprType {
-    // column references
-    string column_name = 1;
-    
-    // alias
-    AliasNode alias = 2;
-
-    ScalarValue literal = 3;
-
-    // binary expressions
-    BinaryExprNode binary_expr = 4;
-    
-    // aggregate expressions
-    AggregateExprNode aggregate_expr = 5;
-    
-    // null checks
-    IsNull is_null_expr = 6;
-    IsNotNull is_not_null_expr = 7;
-    Not not_expr = 8;
-    
-    BetweenNode between = 9;
-    CaseNode case_ = 10;
-    CastNode cast = 11;
-    SortExprNode sort = 12;
-    NegativeNode negative = 13;
-    InListNode in_list = 14;
-    bool wildcard = 15;
-    ScalarFunctionNode scalar_function = 16;
-    TryCastNode try_cast = 17;
-  }
-}
-
-message IsNull {
-  LogicalExprNode expr = 1;
-}
-
-message IsNotNull {
-  LogicalExprNode expr = 1;
-}
-
-message Not {
-  LogicalExprNode expr = 1;
-}
-
-message AliasNode {
-  LogicalExprNode expr = 1;
-  string alias = 2;
-}
-
-message BinaryExprNode {
-  LogicalExprNode l = 1;
-  LogicalExprNode r = 2;
-  string op = 3;
-}
-
-message NegativeNode {
-  LogicalExprNode expr = 1;
-}
-
-message InListNode {
-  LogicalExprNode expr = 1;
-  repeated LogicalExprNode list = 2;
-  bool negated = 3;
-}
-
-enum ScalarFunction {
-  SQRT = 0;
-  SIN = 1;
-  COS = 2;
-  TAN = 3;
-  ASIN = 4;
-  ACOS = 5;
-  ATAN = 6;
-  EXP = 7;
-  LOG = 8;
-  LOG2 = 9;
-  LOG10 = 10;
-  FLOOR = 11;
-  CEIL = 12;
-  ROUND = 13;
-  TRUNC = 14;
-  ABS = 15;
-  SIGNUM = 16;
-  OCTETLENGTH = 17;
-  CONCAT = 18;
-  LOWER = 19;
-  UPPER = 20;
-  TRIM = 21;
-  LTRIM = 22;
-  RTRIM = 23;
-  TOTIMESTAMP = 24;
-  ARRAY = 25;
-  NULLIF = 26;
-  DATETRUNC = 27;
-  MD5 = 28;
-  SHA224 = 29;
-  SHA256 = 30;
-  SHA384 = 31;
-  SHA512 = 32;
-}
-
-message ScalarFunctionNode {
-  ScalarFunction fun = 1;
-  repeated LogicalExprNode expr = 2;
-}
-
-enum AggregateFunction {
-  MIN = 0;
-  MAX = 1;
-  SUM = 2;
-  AVG = 3;
-  COUNT = 4;
-}
-
-message AggregateExprNode {
-  AggregateFunction aggr_function = 1;
-  LogicalExprNode expr = 2;
-}
-
-message BetweenNode {
-  LogicalExprNode expr = 1;
-  bool negated = 2;
-  LogicalExprNode low = 3;
-  LogicalExprNode high = 4;
-}
-
-message CaseNode {
-  LogicalExprNode expr = 1;
-  repeated WhenThen when_then_expr = 2;
-  LogicalExprNode else_expr = 3;
-}
-
-message WhenThen {
-  LogicalExprNode when_expr = 1;
-  LogicalExprNode then_expr = 2;
-}
-
-message CastNode {
-  LogicalExprNode expr = 1;
-  ArrowType arrow_type = 2;
-}
-
-message TryCastNode {
-  LogicalExprNode expr = 1;
-  ArrowType arrow_type = 2;
-}
-
-message SortExprNode {
-  LogicalExprNode expr = 1;
-  bool asc = 2;
-  bool nulls_first = 3;
-}
-
-// LogicalPlan is a nested type
-message LogicalPlanNode {
-  oneof LogicalPlanType {
-    CsvTableScanNode csv_scan = 1;
-    ParquetTableScanNode parquet_scan = 2;
-    ProjectionNode projection = 3;
-    SelectionNode selection = 4;
-    LimitNode limit = 5;
-    AggregateNode aggregate = 6;
-    JoinNode join = 7;
-    SortNode sort = 8;
-    RepartitionNode repartition = 9;
-    EmptyRelationNode empty_relation = 10;
-    CreateExternalTableNode create_external_table = 11;
-    ExplainNode explain = 12;
-  }
-}
-
-message ProjectionColumns {
-  repeated string columns = 1;
-}
-
-message CsvTableScanNode {
-  string table_name = 1;
-  string path = 2;
-  bool has_header = 3;
-  string delimiter = 4;
-  string file_extension = 5;
-  ProjectionColumns projection = 6;
-  Schema schema = 7;
-  repeated LogicalExprNode filters = 8;
-}
-
-message ParquetTableScanNode {
-  string table_name = 1;
-  string path = 2;
-  ProjectionColumns projection = 3;
-  Schema schema = 4;
-  repeated LogicalExprNode filters = 5;
-}
-
-message ProjectionNode {
-  LogicalPlanNode input = 1;
-  repeated LogicalExprNode expr = 2;
-}
-
-message SelectionNode {
-  LogicalPlanNode input = 1;
-  LogicalExprNode expr = 2;
-}
-
-message SortNode{
-  LogicalPlanNode input = 1;
-  repeated LogicalExprNode expr = 2;
-}
-
-message RepartitionNode{
-  LogicalPlanNode input = 1;
-  oneof partition_method {
-    uint64 round_robin = 2;
-    HashRepartition hash = 3;
-  }
-}
-
-message HashRepartition {
-  repeated LogicalExprNode hash_expr = 1;
-  uint64 partition_count = 2;
-}
-
-message EmptyRelationNode{
-  bool produce_one_row = 1;
-}
-
-message CreateExternalTableNode{
-  string name = 1;
-  string location = 2;
-  FileType file_type = 3;
-  bool has_header = 4;
-  Schema schema = 5;
-}
-
-enum FileType{
-  NdJson = 0;
-  Parquet = 1;
-  CSV = 2;
-}
-
-message ExplainNode{
-  LogicalPlanNode input = 1;
-  bool verbose = 2;
-}
-
-message DfField{
-  string qualifier = 2;
-  Field field = 1;
-}
-
-message AggregateNode {
-  LogicalPlanNode input = 1;
-  repeated LogicalExprNode group_expr = 2;
-  repeated LogicalExprNode aggr_expr = 3;
-}
-
-enum JoinType {
-  INNER = 0;
-  LEFT = 1;
-  RIGHT = 2;
-}
-
-message JoinNode {
-  LogicalPlanNode left = 1;
-  LogicalPlanNode right = 2;
-  JoinType join_type = 3;
-  repeated string left_join_column = 4;
-  repeated string right_join_column = 5;
-}
-
-message LimitNode {
-  LogicalPlanNode input = 1;
-  uint32 limit = 2;
-}
-
-message SelectionExecNode {
-  LogicalExprNode expr = 1;
-}
-
-///////////////////////////////////////////////////////////////////////////////////////////////////
-// Ballista Physical Plan
-///////////////////////////////////////////////////////////////////////////////////////////////////
-
-// PhysicalPlanNode is a nested type
-message PhysicalPlanNode {
-  oneof PhysicalPlanType {
-    ParquetScanExecNode parquet_scan = 1;
-    CsvScanExecNode csv_scan = 2;
-    EmptyExecNode empty = 3;
-    ProjectionExecNode projection = 4;
-    GlobalLimitExecNode global_limit = 6;
-    LocalLimitExecNode local_limit = 7;
-    HashAggregateExecNode hash_aggregate = 8;
-    HashJoinExecNode hash_join = 9;
-    ShuffleReaderExecNode shuffle_reader = 10;
-    SortExecNode sort = 11;
-    CoalesceBatchesExecNode coalesce_batches = 12;
-    FilterExecNode filter = 13;
-    MergeExecNode merge = 14;
-    UnresolvedShuffleExecNode unresolved = 15;
-    RepartitionExecNode repartition = 16;
-  }
-}
-
-message UnresolvedShuffleExecNode {
-  repeated uint32 query_stage_ids = 1;
-  Schema schema = 2;
-  uint32 partition_count = 3;
-}
-
-message FilterExecNode {
-  PhysicalPlanNode input = 1;
-  LogicalExprNode expr = 2;
-}
-
-message ParquetScanExecNode {
-  repeated string filename = 1;
-  repeated uint32 projection = 2;
-  uint32 num_partitions = 3;
-  uint32 batch_size = 4;
-}
-
-message CsvScanExecNode {
-  string path = 1;
-  repeated uint32 projection = 2;
-  Schema schema = 3;
-  string file_extension = 4;
-  bool has_header = 5;
-  uint32 batch_size = 6;
-  string delimiter = 7;
-  
-  // partition filenames
-  repeated string filename = 8;
-}
-
-message HashJoinExecNode {
-  PhysicalPlanNode left = 1;
-  PhysicalPlanNode right = 2;
-  repeated JoinOn on = 3;
-  JoinType join_type = 4;
-}
-
-message JoinOn {
-   string left = 1;
-   string right = 2;
-}
-
-
-message EmptyExecNode {
-  bool produce_one_row = 1;
-  Schema schema = 2;
-}
-
-message ProjectionExecNode {
-  PhysicalPlanNode input = 1;
-  repeated LogicalExprNode expr = 2;
-  repeated string expr_name = 3;
-}
-
-enum AggregateMode {
-  PARTIAL = 0;
-  FINAL = 1;
-}
-
-message HashAggregateExecNode {
-  repeated LogicalExprNode group_expr = 1;
-  repeated LogicalExprNode aggr_expr = 2;
-  AggregateMode mode = 3;
-  PhysicalPlanNode input = 4;
-  repeated string group_expr_name = 5;
-  repeated string aggr_expr_name = 6;
-  // we need the input schema to the partial aggregate to pass to the final aggregate
-  Schema input_schema = 7;
-}
-
-message ShuffleReaderExecNode {
-  repeated PartitionLocation partition_location = 1;
-  Schema schema = 2;
-}
-
-message GlobalLimitExecNode {
-  PhysicalPlanNode input = 1;
-  uint32 limit = 2;
-}
-
-message LocalLimitExecNode {
-  PhysicalPlanNode input = 1;
-  uint32 limit = 2;
-}
-
-message SortExecNode {
-  PhysicalPlanNode input = 1;
-  repeated LogicalExprNode expr = 2;
-}
-
-message CoalesceBatchesExecNode {
-  PhysicalPlanNode input = 1;
-  uint32 target_batch_size = 2;
-}
-
-message MergeExecNode {
-  PhysicalPlanNode input = 1;
-}
-
-message RepartitionExecNode{
-  PhysicalPlanNode input = 1;
-  oneof partition_method {
-    uint64 round_robin = 2;
-    HashRepartition hash = 3;
-    uint64 unknown = 4;
-  }
-}
-
-///////////////////////////////////////////////////////////////////////////////////////////////////
-// Ballista Scheduling
-///////////////////////////////////////////////////////////////////////////////////////////////////
-
-message KeyValuePair {
-  string key = 1;
-  string value = 2;
-}
-
-message Action {
-
-  oneof ActionType {
-    // Execute a logical query plan
-    LogicalPlanNode query = 1;
-
-    // Execute one partition of a physical query plan
-    ExecutePartition execute_partition = 2;
-
-    // Fetch a partition from an executor
-    PartitionId fetch_partition = 3;
-  }
-  
-  // configuration settings
-  repeated KeyValuePair settings = 100;
-}
-
-message ExecutePartition {
-  string job_id = 1;
-  uint32 stage_id = 2;
-  repeated uint32 partition_id = 3;
-  PhysicalPlanNode plan = 4;
-  // The task could need to read partitions from other executors
-  repeated PartitionLocation partition_location = 5;
-}
-
-// Mapping from partition id to executor id
-message PartitionLocation {
-  PartitionId partition_id = 1;
-  ExecutorMetadata executor_meta = 2;
-  PartitionStats partition_stats = 3;
-}
-
-// Unique identifier for a materialized partition of data
-message PartitionId {
-  string job_id = 1;
-  uint32 stage_id = 2;
-  uint32 partition_id = 4;
-}
-
-message PartitionStats {
-  int64 num_rows = 1;
-  int64 num_batches = 2;
-  int64 num_bytes = 3;
-  repeated ColumnStats column_stats = 4;
-}
-
-message ColumnStats {
-  ScalarValue min_value = 1;
-  ScalarValue max_value = 2;
-  uint32 null_count = 3;
-  uint32 distinct_count = 4;
-}
-
-message ExecutorMetadata {
-  string id = 1;
-  string host = 2;
-  uint32 port = 3;
-}
-
-message GetExecutorMetadataParams {}
-
-message GetExecutorMetadataResult {
-  repeated ExecutorMetadata metadata = 1;
-}
-
-message RunningTask {
-  string executor_id = 1;
-}
-
-message FailedTask {
-  string error = 1;
-}
-
-message CompletedTask {
-  string executor_id = 1;
-}
-
-message TaskStatus {
-  PartitionId partition_id = 1;
-  oneof status {
-    RunningTask running = 2;
-    FailedTask failed = 3;
-    CompletedTask completed = 4;
-  }
-}
-
-message PollWorkParams {
-  ExecutorMetadata metadata = 1;
-  bool can_accept_task = 2;
-  // All tasks must be reported until they reach the failed or completed state
-  repeated TaskStatus task_status = 3;
-}
-
-message TaskDefinition {
-  PartitionId task_id = 1;
-  PhysicalPlanNode plan = 2;
-}
-
-message PollWorkResult {
-  TaskDefinition task = 1;
-}
-
-message ExecuteQueryParams {
-  oneof query {
-    LogicalPlanNode logical_plan = 1;
-    string sql = 2;
-  }
-}
-
-message ExecuteSqlParams {
-  string sql = 1;
-}
-
-message ExecuteQueryResult {
-  string job_id = 1;
-}
-
-message GetJobStatusParams {
-  string job_id = 1;
-}
-
-message CompletedJob {
-  repeated PartitionLocation partition_location = 1;
-}
-
-message QueuedJob {}
-
-// TODO: add progress report
-message RunningJob {}
-
-message FailedJob {
-  string error = 1;
-}
-
-message JobStatus {
-  oneof status {
-    QueuedJob queued = 1;
-    RunningJob running = 2;
-    FailedJob failed = 3;
-    CompletedJob completed = 4;
-  }
-}
-
-message GetJobStatusResult {
-  JobStatus status = 1;
-}
-
-message GetFileMetadataParams {
-  string path = 1;
-  FileType file_type = 2;
-}
-
-message GetFileMetadataResult {
-  Schema schema = 1;
-  repeated FilePartitionMetadata partitions = 2;
-}
-
-message FilePartitionMetadata {
-  repeated string filename = 1;
-}
-
-service SchedulerGrpc {
-  rpc GetExecutorsMetadata (GetExecutorMetadataParams) returns (GetExecutorMetadataResult) {}
-
-  // Executors must poll the scheduler for heartbeat and to receive tasks
-  rpc PollWork (PollWorkParams) returns (PollWorkResult) {}
-
-  rpc GetFileMetadata (GetFileMetadataParams) returns (GetFileMetadataResult) {}
-
-  rpc ExecuteQuery (ExecuteQueryParams) returns (ExecuteQueryResult) {}
-
-  rpc GetJobStatus (GetJobStatusParams) returns (GetJobStatusResult) {}
-}
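
(A hedged sketch of how the generated bindings for the SchedulerGrpc service
above are consumed: tonic emits a client named `SchedulerGrpcClient` in a
`scheduler_grpc_client` module by convention. The module path, endpoint, and
port below are illustrative assumptions, not taken from this commit.)

    use crate::serde::protobuf::scheduler_grpc_client::SchedulerGrpcClient;
    use crate::serde::protobuf::PollWorkParams;

    async fn poll_once() -> Result<(), Box<dyn std::error::Error>> {
        // connect to the scheduler; the address is a placeholder
        let mut client = SchedulerGrpcClient::connect("http://localhost:50050").await?;
        let response = client
            .poll_work(PollWorkParams {
                metadata: None,
                can_accept_task: true,
                task_status: vec![],
            })
            .await?;
        // the scheduler may or may not have a task for this executor
        if let Some(task) = response.into_inner().task {
            println!("received task for partition {:?}", task.task_id);
        }
        Ok(())
    }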
-
-///////////////////////////////////////////////////////////////////////////////////////////////////
-// Arrow Data Types
-///////////////////////////////////////////////////////////////////////////////////////////////////
-
-message Schema {
-  repeated Field columns = 1;
-}
-
-message Field {
-  // name of the field
-  string name = 1;
-  ArrowType arrow_type = 2;
-  bool nullable = 3;
-  // for complex data types like structs, unions
-  repeated Field children = 4;
-}
-
-message FixedSizeBinary{
-    int32 length = 1;
-}
-
-message Timestamp{
-    TimeUnit time_unit = 1;
-    string timezone = 2;
-}
-
-enum DateUnit{
-    Day = 0;
-    DateMillisecond = 1;
-}
-
-enum TimeUnit{
-    Second = 0;
-    TimeMillisecond = 1;
-    Microsecond = 2;
-    Nanosecond = 3;
-}
-
-enum IntervalUnit{
-    YearMonth = 0;
-    DayTime = 1;
-}
-
-message Decimal{
-    uint64 whole = 1;
-    uint64 fractional = 2;
-}
-
-message List{
-    Field field_type = 1;
-}
-
-message FixedSizeList{
-    Field field_type = 1;
-    int32 list_size = 2;
-}
-
-message Dictionary{
-    ArrowType key = 1;
-    ArrowType value = 2;
-}
-
-message Struct{
-    repeated Field sub_field_types = 1;
-}
-
-message Union{
-    repeated Field union_types = 1;
-}
-
-message ScalarListValue{
-    ScalarType datatype = 1;
-    repeated ScalarValue values = 2;
-}
-
-message ScalarValue{
-    oneof value{
-        bool   bool_value = 1;
-        string utf8_value = 2;
-        string large_utf8_value = 3;
-        int32  int8_value = 4;
-        int32  int16_value = 5;
-        int32  int32_value = 6;
-        int64  int64_value = 7;
-        uint32 uint8_value = 8;
-        uint32 uint16_value = 9;
-        uint32 uint32_value = 10;
-        uint64 uint64_value = 11;
-        float  float32_value = 12;
-        double float64_value = 13;
-        //Literal Date32 value always has a unit of day
-        int32  date_32_value = 14;
-        int64  time_microsecond_value = 15;
-        int64  time_nanosecond_value = 16;
-        ScalarListValue list_value = 17;
-        ScalarType null_list_value = 18;
-
-        PrimitiveScalarType null_value = 19;
-    }
-}
-
-// Contains all valid DataFusion scalar types except for
-// List.
-enum PrimitiveScalarType{
-    BOOL = 0;     // arrow::Type::BOOL
-    UINT8 = 1;    // arrow::Type::UINT8
-    INT8 = 2;     // arrow::Type::INT8
-    UINT16 = 3;   // represents arrow::Type fields in src/arrow/type.h
-    INT16 = 4;
-    UINT32 = 5;
-    INT32 = 6;
-    UINT64 = 7;
-    INT64 = 8;
-    FLOAT32 = 9;
-    FLOAT64 = 10;
-    UTF8 = 11;
-    LARGE_UTF8 = 12;
-    DATE32 = 13;
-    TIME_MICROSECOND = 14;
-    TIME_NANOSECOND = 15;
-    NULL = 16;
-}
-
-message ScalarType{
-    oneof datatype{
-        PrimitiveScalarType scalar = 1;
-        ScalarListType list = 2;
-    }
-}
-
-message ScalarListType{
-    repeated string field_names = 3;
-    PrimitiveScalarType deepest_type = 2;
-}
-
-// Broken out into multiple message types so that type
-// metadata did not need to be in a separate message.
-// All of the empty message types carry no additional
-// metadata about the type.
-message ArrowType{
-    oneof arrow_type_enum{
-        EmptyMessage NONE = 1;    // arrow::Type::NA
-        EmptyMessage BOOL = 2;    // arrow::Type::BOOL
-        EmptyMessage UINT8 = 3;   // arrow::Type::UINT8
-        EmptyMessage INT8 = 4;    // arrow::Type::INT8
-        EmptyMessage UINT16 = 5;  // represents arrow::Type fields in src/arrow/type.h
-        EmptyMessage INT16 = 6;
-        EmptyMessage UINT32 = 7;
-        EmptyMessage INT32 = 8;
-        EmptyMessage UINT64 = 9;
-        EmptyMessage INT64 = 10;
-        EmptyMessage FLOAT16 = 11;
-        EmptyMessage FLOAT32 = 12;
-        EmptyMessage FLOAT64 = 13;
-        EmptyMessage UTF8 = 14;
-        EmptyMessage LARGE_UTF8 = 32;
-        EmptyMessage BINARY = 15;
-        int32 FIXED_SIZE_BINARY = 16;
-        EmptyMessage LARGE_BINARY = 31;
-        EmptyMessage DATE32 = 17;
-        EmptyMessage DATE64 = 18;
-        TimeUnit DURATION = 19;
-        Timestamp TIMESTAMP = 20;
-        TimeUnit TIME32 = 21;
-        TimeUnit TIME64 = 22;
-        IntervalUnit INTERVAL = 23;
-        Decimal DECIMAL = 24;
-        List LIST = 25;
-        List LARGE_LIST = 26;
-        FixedSizeList FIXED_SIZE_LIST = 27;
-        Struct STRUCT = 28;
-        Union UNION = 29;
-        Dictionary DICTIONARY = 30;
-    }
-}
-
-// Useful for representing an empty enum variant in Rust,
-// e.g. enum example{One, Two(i32)}
-// maps to
-// message example{
-//    oneof{
-//        EmptyMessage One = 1;
-//        i32 Two = 2;
-//   }
-//}
-message EmptyMessage{}
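
(A minimal sketch of how the prost-generated types for the messages above are
constructed in Rust. The `logical_expr_node::ExprType` path follows prost's
generated-code conventions, the same path that from_proto.rs below imports;
the helper function itself is hypothetical.)

    use crate::serde::protobuf::{logical_expr_node::ExprType, LogicalExprNode};

    /// Build the protobuf form of a bare column reference, i.e. the
    /// `string column_name = 1;` arm of the ExprType oneof.
    fn column_expr(name: &str) -> LogicalExprNode {
        LogicalExprNode {
            expr_type: Some(ExprType::ColumnName(name.to_owned())),
        }
    }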
diff --git a/rust/ballista/rust/core/src/client.rs b/rust/ballista/rust/core/src/client.rs
deleted file mode 100644
index f64f95f..0000000
--- a/rust/ballista/rust/core/src/client.rs
+++ /dev/null
@@ -1,224 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Client API for sending requests to executors.
-
-use std::sync::Arc;
-use std::{collections::HashMap, pin::Pin};
-use std::{
-    convert::{TryFrom, TryInto},
-    task::{Context, Poll},
-};
-
-use crate::error::{ballista_error, BallistaError, Result};
-use crate::memory_stream::MemoryStream;
-use crate::serde::protobuf::{self};
-use crate::serde::scheduler::{
-    Action, ExecutePartition, ExecutePartitionResult, PartitionId, PartitionStats,
-};
-
-use arrow::record_batch::RecordBatch;
-use arrow::{
-    array::{StringArray, StructArray},
-    error::{ArrowError, Result as ArrowResult},
-};
-use arrow::{datatypes::Schema, datatypes::SchemaRef};
-use arrow_flight::utils::flight_data_to_arrow_batch;
-use arrow_flight::Ticket;
-use arrow_flight::{flight_service_client::FlightServiceClient, FlightData};
-use datafusion::physical_plan::common::collect;
-use datafusion::physical_plan::{ExecutionPlan, SendableRecordBatchStream};
-use datafusion::{logical_plan::LogicalPlan, physical_plan::RecordBatchStream};
-use futures::{Stream, StreamExt};
-use log::debug;
-use prost::Message;
-use tonic::Streaming;
-use uuid::Uuid;
-
-/// Client for interacting with Ballista executors.
-#[derive(Clone)]
-pub struct BallistaClient {
-    flight_client: FlightServiceClient<tonic::transport::channel::Channel>,
-}
-
-impl BallistaClient {
-    /// Create a new BallistaClient to connect to the executor listening on the specified
-    /// host and port.
-    pub async fn try_new(host: &str, port: u16) -> Result<Self> {
-        let addr = format!("http://{}:{}", host, port);
-        debug!("BallistaClient connecting to {}", addr);
-        let flight_client =
-            FlightServiceClient::connect(addr.clone())
-                .await
-                .map_err(|e| {
-                    BallistaError::General(format!(
-                        "Error connecting to Ballista scheduler or executor at {}: {:?}",
-                        addr, e
-                    ))
-                })?;
-        debug!("BallistaClient connected OK");
-
-        Ok(Self { flight_client })
-    }
-
-    /// Execute one partition of a physical query plan against the executor
-    pub async fn execute_partition(
-        &mut self,
-        job_id: String,
-        stage_id: usize,
-        partition_id: Vec<usize>,
-        plan: Arc<dyn ExecutionPlan>,
-    ) -> Result<Vec<ExecutePartitionResult>> {
-        let action = Action::ExecutePartition(ExecutePartition {
-            job_id,
-            stage_id,
-            partition_id,
-            plan,
-            shuffle_locations: Default::default(),
-        });
-        let stream = self.execute_action(&action).await?;
-        let batches = collect(stream).await?;
-
-        batches
-            .iter()
-            .map(|batch| {
-                if batch.num_rows() != 1 {
-                    Err(BallistaError::General(
-                        "execute_partition received wrong number of rows".to_owned(),
-                    ))
-                } else {
-                    let path = batch
-                        .column(0)
-                        .as_any()
-                        .downcast_ref::<StringArray>()
-                        .expect(
-                            "execute_partition expected column 0 to be a StringArray",
-                        );
-
-                    let stats = batch
-                        .column(1)
-                        .as_any()
-                        .downcast_ref::<StructArray>()
-                        .expect(
-                            "execute_partition expected column 1 to be a StructArray",
-                        );
-
-                    Ok(ExecutePartitionResult::new(
-                        path.value(0),
-                        PartitionStats::from_arrow_struct_array(stats),
-                    ))
-                }
-            })
-            .collect::<Result<Vec<_>>>()
-    }
-
-    /// Fetch a partition from an executor
-    pub async fn fetch_partition(
-        &mut self,
-        job_id: &str,
-        stage_id: usize,
-        partition_id: usize,
-    ) -> Result<SendableRecordBatchStream> {
-        let action =
-            Action::FetchPartition(PartitionId::new(job_id, stage_id, partition_id));
-        self.execute_action(&action).await
-    }
-
-    /// Execute an action and retrieve the results
-    pub async fn execute_action(
-        &mut self,
-        action: &Action,
-    ) -> Result<SendableRecordBatchStream> {
-        let serialized_action: protobuf::Action = action.to_owned().try_into()?;
-
-        let mut buf: Vec<u8> = Vec::with_capacity(serialized_action.encoded_len());
-
-        serialized_action
-            .encode(&mut buf)
-            .map_err(|e| BallistaError::General(format!("{:?}", e)))?;
-
-        let request = tonic::Request::new(Ticket { ticket: buf });
-
-        let mut stream = self
-            .flight_client
-            .do_get(request)
-            .await
-            .map_err(|e| BallistaError::General(format!("{:?}", e)))?
-            .into_inner();
-
-        // the schema should be the first message returned, else client should error
-        match stream
-            .message()
-            .await
-            .map_err(|e| BallistaError::General(format!("{:?}", e)))?
-        {
-            Some(flight_data) => {
-                // convert FlightData to a stream
-                let schema = Arc::new(Schema::try_from(&flight_data)?);
-
-                // all the remaining stream messages should be dictionary and record batches
-                Ok(Box::pin(FlightDataStream::new(stream, schema)))
-            }
-            None => Err(ballista_error(
-                "Did not receive schema batch from flight server",
-            )),
-        }
-    }
-}
-
-struct FlightDataStream {
-    stream: Streaming<FlightData>,
-    schema: SchemaRef,
-}
-
-impl FlightDataStream {
-    pub fn new(stream: Streaming<FlightData>, schema: SchemaRef) -> Self {
-        Self { stream, schema }
-    }
-}
-
-impl Stream for FlightDataStream {
-    type Item = ArrowResult<RecordBatch>;
-
-    fn poll_next(
-        mut self: std::pin::Pin<&mut Self>,
-        cx: &mut Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
-        self.stream.poll_next_unpin(cx).map(|x| match x {
-            Some(flight_data_chunk_result) => {
-                let converted_chunk = flight_data_chunk_result
-                    .map_err(|e| ArrowError::from_external_error(Box::new(e)))
-                    .and_then(|flight_data_chunk| {
-                        flight_data_to_arrow_batch(
-                            &flight_data_chunk,
-                            self.schema.clone(),
-                            &[],
-                        )
-                    });
-                Some(converted_chunk)
-            }
-            None => None,
-        })
-    }
-}
-
-impl RecordBatchStream for FlightDataStream {
-    fn schema(&self) -> SchemaRef {
-        self.schema.clone()
-    }
-}
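
(A hedged usage sketch for the client above; the crate name `ballista_core`,
the executor address, and the job/stage/partition ids are assumptions for
illustration, not taken from this commit.)

    use ballista_core::client::BallistaClient;
    use futures::StreamExt;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut client = BallistaClient::try_new("localhost", 50051).await?;
        // fetch one materialized partition and drain the Flight stream
        let mut stream = client.fetch_partition("job-1", 0, 0).await?;
        while let Some(batch) = stream.next().await {
            println!("got {} rows", batch?.num_rows());
        }
        Ok(())
    }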
diff --git a/rust/ballista/rust/core/src/datasource.rs b/rust/ballista/rust/core/src/datasource.rs
deleted file mode 100644
index 8ff0df4..0000000
--- a/rust/ballista/rust/core/src/datasource.rs
+++ /dev/null
@@ -1,72 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-use std::{any::Any, sync::Arc};
-
-use arrow::datatypes::SchemaRef;
-use datafusion::error::Result as DFResult;
-use datafusion::{
-    datasource::{datasource::Statistics, TableProvider},
-    logical_plan::{Expr, LogicalPlan},
-    physical_plan::ExecutionPlan,
-};
-
-/// This ugly adapter is needed because we use DataFusion's logical plan when building queries
-/// and when we register tables with DataFusion's `ExecutionContext` we need to provide a
-/// TableProvider which is effectively a wrapper around a physical plan. We need to be able to
-/// register tables so that we can create logical plans from SQL statements that reference these
-/// tables.
-pub struct DFTableAdapter {
-    /// DataFusion logical plan
-    pub logical_plan: LogicalPlan,
-    /// DataFusion execution plan
-    plan: Arc<dyn ExecutionPlan>,
-}
-
-impl DFTableAdapter {
-    pub fn new(logical_plan: LogicalPlan, plan: Arc<dyn ExecutionPlan>) -> Self {
-        Self { logical_plan, plan }
-    }
-}
-
-impl TableProvider for DFTableAdapter {
-    fn as_any(&self) -> &dyn Any {
-        self
-    }
-
-    fn schema(&self) -> SchemaRef {
-        self.plan.schema()
-    }
-
-    fn scan(
-        &self,
-        _projection: &Option<Vec<usize>>,
-        _batch_size: usize,
-        _filters: &[Expr],
-        _limit: Option<usize>,
-    ) -> DFResult<Arc<dyn ExecutionPlan>> {
-        Ok(self.plan.clone())
-    }
-
-    fn statistics(&self) -> Statistics {
-        Statistics {
-            num_rows: None,
-            total_byte_size: None,
-            column_statistics: None,
-        }
-    }
-}
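
(A sketch of the registration path the comment above describes, assuming the
`ExecutionContext::register_table` signature of DataFusion in this era; the
table name "t" and the wrapper function are illustrative.)

    use std::sync::Arc;

    use datafusion::execution::context::ExecutionContext;
    use datafusion::logical_plan::LogicalPlan;
    use datafusion::physical_plan::ExecutionPlan;

    fn register(
        ctx: &mut ExecutionContext,
        logical_plan: LogicalPlan,
        plan: Arc<dyn ExecutionPlan>,
    ) {
        // wrap the physical plan so SQL queries can reference it as a table
        let adapter = DFTableAdapter::new(logical_plan, plan);
        ctx.register_table("t", Box::new(adapter));
    }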
diff --git a/rust/ballista/rust/core/src/error.rs b/rust/ballista/rust/core/src/error.rs
deleted file mode 100644
index d0155ce..0000000
--- a/rust/ballista/rust/core/src/error.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Ballista error types
-
-use std::{
-    error::Error,
-    fmt::{Display, Formatter},
-    io, result,
-};
-
-use arrow::error::ArrowError;
-use datafusion::error::DataFusionError;
-use sqlparser::parser;
-
-pub type Result<T> = result::Result<T, BallistaError>;
-
-/// Ballista error
-#[derive(Debug)]
-pub enum BallistaError {
-    NotImplemented(String),
-    General(String),
-    Internal(String),
-    ArrowError(ArrowError),
-    DataFusionError(DataFusionError),
-    SqlError(parser::ParserError),
-    IoError(io::Error),
-    // ReqwestError(reqwest::Error),
-    //HttpError(http::Error),
-    // KubeAPIError(kube::error::Error),
-    // KubeAPIRequestError(k8s_openapi::RequestError),
-    // KubeAPIResponseError(k8s_openapi::ResponseError),
-    TonicError(tonic::transport::Error),
-    GrpcError(tonic::Status),
-    TokioError(tokio::task::JoinError),
-}
-
-impl<T> Into<Result<T>> for BallistaError {
-    fn into(self) -> Result<T> {
-        Err(self)
-    }
-}
-
-pub fn ballista_error(message: &str) -> BallistaError {
-    BallistaError::General(message.to_owned())
-}
-
-impl From<String> for BallistaError {
-    fn from(e: String) -> Self {
-        BallistaError::General(e)
-    }
-}
-
-impl From<ArrowError> for BallistaError {
-    fn from(e: ArrowError) -> Self {
-        BallistaError::ArrowError(e)
-    }
-}
-
-impl From<parser::ParserError> for BallistaError {
-    fn from(e: parser::ParserError) -> Self {
-        BallistaError::SqlError(e)
-    }
-}
-
-impl From<DataFusionError> for BallistaError {
-    fn from(e: DataFusionError) -> Self {
-        BallistaError::DataFusionError(e)
-    }
-}
-
-impl From<io::Error> for BallistaError {
-    fn from(e: io::Error) -> Self {
-        BallistaError::IoError(e)
-    }
-}
-
-// impl From<reqwest::Error> for BallistaError {
-//     fn from(e: reqwest::Error) -> Self {
-//         BallistaError::ReqwestError(e)
-//     }
-// }
-//
-// impl From<http::Error> for BallistaError {
-//     fn from(e: http::Error) -> Self {
-//         BallistaError::HttpError(e)
-//     }
-// }
-
-// impl From<kube::error::Error> for BallistaError {
-//     fn from(e: kube::error::Error) -> Self {
-//         BallistaError::KubeAPIError(e)
-//     }
-// }
-
-// impl From<k8s_openapi::RequestError> for BallistaError {
-//     fn from(e: k8s_openapi::RequestError) -> Self {
-//         BallistaError::KubeAPIRequestError(e)
-//     }
-// }
-
-// impl From<k8s_openapi::ResponseError> for BallistaError {
-//     fn from(e: k8s_openapi::ResponseError) -> Self {
-//         BallistaError::KubeAPIResponseError(e)
-//     }
-// }
-
-impl From<tonic::transport::Error> for BallistaError {
-    fn from(e: tonic::transport::Error) -> Self {
-        BallistaError::TonicError(e)
-    }
-}
-
-impl From<tonic::Status> for BallistaError {
-    fn from(e: tonic::Status) -> Self {
-        BallistaError::GrpcError(e)
-    }
-}
-
-impl From<tokio::task::JoinError> for BallistaError {
-    fn from(e: tokio::task::JoinError) -> Self {
-        BallistaError::TokioError(e)
-    }
-}
-
-impl Display for BallistaError {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        match self {
-            BallistaError::NotImplemented(ref desc) => {
-                write!(f, "Not implemented: {}", desc)
-            }
-            BallistaError::General(ref desc) => write!(f, "General error: {}", desc),
-            BallistaError::ArrowError(ref desc) => write!(f, "Arrow error: {}", desc),
-            BallistaError::DataFusionError(ref desc) => {
-                write!(f, "DataFusion error: {:?}", desc)
-            }
-            BallistaError::SqlError(ref desc) => write!(f, "SQL error: {:?}", desc),
-            BallistaError::IoError(ref desc) => write!(f, "IO error: {}", desc),
-            // BallistaError::ReqwestError(ref desc) => write!(f, "Reqwest error: {}", desc),
-            // BallistaError::HttpError(ref desc) => write!(f, "HTTP error: {}", desc),
-            // BallistaError::KubeAPIError(ref desc) => write!(f, "Kube API error: {}", desc),
-            // BallistaError::KubeAPIRequestError(ref desc) => {
-            //     write!(f, "KubeAPI request error: {}", desc)
-            // }
-            // BallistaError::KubeAPIResponseError(ref desc) => {
-            //     write!(f, "KubeAPI response error: {}", desc)
-            // }
-            BallistaError::TonicError(desc) => write!(f, "Tonic error: {}", desc),
-            BallistaError::GrpcError(desc) => write!(f, "Grpc error: {}", desc),
-            BallistaError::Internal(desc) => {
-                write!(f, "Internal Ballista error: {}", desc)
-            }
-            BallistaError::TokioError(desc) => write!(f, "Tokio join error: {}", desc),
-        }
-    }
-}
-
-impl Error for BallistaError {}
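
(The `From` conversions above are what make `?` ergonomic across error
sources; a minimal illustration — the function itself is hypothetical.)

    use crate::error::Result;

    fn read_manifest(path: &str) -> Result<Vec<u8>> {
        // std::io::Error converts into BallistaError::IoError via `?`,
        // using the From<io::Error> impl defined above
        Ok(std::fs::read(path)?)
    }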
diff --git a/rust/ballista/rust/core/src/execution_plans/mod.rs b/rust/ballista/rust/core/src/execution_plans/mod.rs
deleted file mode 100644
index 1fb2010..0000000
--- a/rust/ballista/rust/core/src/execution_plans/mod.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! This module contains the execution plans that are needed to distribute
-//! DataFusion's execution plans across several Ballista executors.
-
-mod query_stage;
-mod shuffle_reader;
-mod unresolved_shuffle;
-
-pub use query_stage::QueryStageExec;
-pub use shuffle_reader::ShuffleReaderExec;
-pub use unresolved_shuffle::UnresolvedShuffleExec;
diff --git a/rust/ballista/rust/core/src/execution_plans/query_stage.rs b/rust/ballista/rust/core/src/execution_plans/query_stage.rs
deleted file mode 100644
index d8822ea..0000000
--- a/rust/ballista/rust/core/src/execution_plans/query_stage.rs
+++ /dev/null
@@ -1,92 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-use std::sync::Arc;
-use std::{any::Any, pin::Pin};
-
-use arrow::datatypes::SchemaRef;
-use async_trait::async_trait;
-use datafusion::physical_plan::{ExecutionPlan, Partitioning};
-use datafusion::{error::Result, physical_plan::RecordBatchStream};
-use uuid::Uuid;
-
-/// QueryStageExec represents a section of a query plan that has consistent partitioning and
-/// can be executed as one unit with each partition being executed in parallel. The output of
-/// a query stage either forms the input of another query stage or can be the final result of
-/// a query.
-#[derive(Debug, Clone)]
-pub struct QueryStageExec {
-    /// Unique ID for the job (query) that this stage is a part of
-    pub job_id: String,
-    /// Unique query stage ID within the job
-    pub stage_id: usize,
-    /// Physical execution plan for this query stage
-    pub child: Arc<dyn ExecutionPlan>,
-}
-
-impl QueryStageExec {
-    /// Create a new query stage
-    pub fn try_new(
-        job_id: String,
-        stage_id: usize,
-        child: Arc<dyn ExecutionPlan>,
-    ) -> Result<Self> {
-        Ok(Self {
-            job_id,
-            stage_id,
-            child,
-        })
-    }
-}
-
-#[async_trait]
-impl ExecutionPlan for QueryStageExec {
-    fn as_any(&self) -> &dyn Any {
-        self
-    }
-
-    fn schema(&self) -> SchemaRef {
-        self.child.schema()
-    }
-
-    fn output_partitioning(&self) -> Partitioning {
-        self.child.output_partitioning()
-    }
-
-    fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
-        vec![self.child.clone()]
-    }
-
-    fn with_new_children(
-        &self,
-        children: Vec<Arc<dyn ExecutionPlan>>,
-    ) -> Result<Arc<dyn ExecutionPlan>> {
-        assert!(children.len() == 1);
-        Ok(Arc::new(QueryStageExec::try_new(
-            self.job_id.clone(),
-            self.stage_id,
-            children[0].clone(),
-        )?))
-    }
-
-    async fn execute(
-        &self,
-        partition: usize,
-    ) -> Result<Pin<Box<dyn RecordBatchStream + Send + Sync>>> {
-        self.child.execute(partition).await
-    }
-}
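
(A sketch of how a planner would wrap one section of a physical plan in the
stage type above; the ids and the surrounding scheduler logic are
illustrative.)

    use std::sync::Arc;

    use datafusion::physical_plan::ExecutionPlan;

    fn wrap_stage(
        job_id: &str,
        stage_id: usize,
        plan: Arc<dyn ExecutionPlan>,
    ) -> datafusion::error::Result<Arc<QueryStageExec>> {
        // the stage shares the child's schema and partitioning, so executing
        // partition `p` of the stage executes partition `p` of the child
        Ok(Arc::new(QueryStageExec::try_new(
            job_id.to_owned(),
            stage_id,
            plan,
        )?))
    }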
diff --git a/rust/ballista/rust/core/src/execution_plans/shuffle_reader.rs b/rust/ballista/rust/core/src/execution_plans/shuffle_reader.rs
deleted file mode 100644
index bd8f6fd..0000000
--- a/rust/ballista/rust/core/src/execution_plans/shuffle_reader.rs
+++ /dev/null
@@ -1,106 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-use std::sync::Arc;
-use std::{any::Any, pin::Pin};
-
-use crate::client::BallistaClient;
-use crate::memory_stream::MemoryStream;
-use crate::serde::scheduler::PartitionLocation;
-
-use arrow::datatypes::SchemaRef;
-use async_trait::async_trait;
-use datafusion::physical_plan::{ExecutionPlan, Partitioning};
-use datafusion::{
-    error::{DataFusionError, Result},
-    physical_plan::RecordBatchStream,
-};
-use log::info;
-
-/// ShuffleReaderExec reads partitions that have already been materialized by an executor.
-#[derive(Debug, Clone)]
-pub struct ShuffleReaderExec {
-    // The query stage that is responsible for producing the shuffle partitions that
-    // this operator will read
-    pub(crate) partition_location: Vec<PartitionLocation>,
-    pub(crate) schema: SchemaRef,
-}
-
-impl ShuffleReaderExec {
-    /// Create a new ShuffleReaderExec
-    pub fn try_new(
-        partition_meta: Vec<PartitionLocation>,
-        schema: SchemaRef,
-    ) -> Result<Self> {
-        Ok(Self {
-            partition_location: partition_meta,
-            schema,
-        })
-    }
-}
-
-#[async_trait]
-impl ExecutionPlan for ShuffleReaderExec {
-    fn as_any(&self) -> &dyn Any {
-        self
-    }
-
-    fn schema(&self) -> SchemaRef {
-        self.schema.clone()
-    }
-
-    fn output_partitioning(&self) -> Partitioning {
-        Partitioning::UnknownPartitioning(self.partition_location.len())
-    }
-
-    fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
-        vec![]
-    }
-
-    fn with_new_children(
-        &self,
-        _children: Vec<Arc<dyn ExecutionPlan>>,
-    ) -> Result<Arc<dyn ExecutionPlan>> {
-        Err(DataFusionError::Plan(
-            "Ballista ShuffleReaderExec does not support with_new_children()".to_owned(),
-        ))
-    }
-
-    async fn execute(
-        &self,
-        partition: usize,
-    ) -> Result<Pin<Box<dyn RecordBatchStream + Send + Sync>>> {
-        info!("ShuffleReaderExec::execute({})", partition);
-        let partition_location = &self.partition_location[partition];
-
-        let mut client = BallistaClient::try_new(
-            &partition_location.executor_meta.host,
-            partition_location.executor_meta.port,
-        )
-        .await
-        .map_err(|e| DataFusionError::Execution(format!("Ballista Error: {:?}", e)))?;
-
-        client
-            .fetch_partition(
-                &partition_location.partition_id.job_id,
-                partition_location.partition_id.stage_id,
-                partition,
-            )
-            .await
-            .map_err(|e| DataFusionError::Execution(format!("Ballista Error: {:?}", e)))
-    }
-}
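
(Driving the operator above looks like driving any other ExecutionPlan; a
minimal sketch using the same `collect` helper that client.rs imports. The
wrapper function is hypothetical.)

    use arrow::record_batch::RecordBatch;
    use datafusion::physical_plan::common::collect;

    async fn read_one_partition(
        reader: &ShuffleReaderExec,
    ) -> datafusion::error::Result<Vec<RecordBatch>> {
        // one stream per entry in partition_location
        let stream = reader.execute(0).await?;
        collect(stream).await
    }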
diff --git a/rust/ballista/rust/core/src/execution_plans/unresolved_shuffle.rs b/rust/ballista/rust/core/src/execution_plans/unresolved_shuffle.rs
deleted file mode 100644
index a62a251..0000000
--- a/rust/ballista/rust/core/src/execution_plans/unresolved_shuffle.rs
+++ /dev/null
@@ -1,101 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-use std::sync::Arc;
-use std::{any::Any, pin::Pin};
-
-use crate::client::BallistaClient;
-use crate::memory_stream::MemoryStream;
-use crate::serde::scheduler::PartitionLocation;
-
-use arrow::datatypes::SchemaRef;
-use async_trait::async_trait;
-use datafusion::physical_plan::{ExecutionPlan, Partitioning};
-use datafusion::{
-    error::{DataFusionError, Result},
-    physical_plan::RecordBatchStream,
-};
-use log::info;
-
-/// UnresolvedShuffleExec represents a dependency on the results of several QueryStageExec nodes which haven't been computed yet.
-///
-/// An ExecutionPlan that contains an UnresolvedShuffleExec isn't ready for execution. The presence of this ExecutionPlan
-/// is used as a signal so the scheduler knows it can't start computation on a specific QueryStageExec.
-#[derive(Debug, Clone)]
-pub struct UnresolvedShuffleExec {
-    // The query stage ids which need to be computed
-    pub query_stage_ids: Vec<usize>,
-
-    // The schema this node will have once it is replaced with a ShuffleReaderExec
-    pub schema: SchemaRef,
-
-    // The partition count this node will have once it is replaced with a ShuffleReaderExec
-    pub partition_count: usize,
-}
-
-impl UnresolvedShuffleExec {
-    /// Create a new UnresolvedShuffleExec
-    pub fn new(
-        query_stage_ids: Vec<usize>,
-        schema: SchemaRef,
-        partition_count: usize,
-    ) -> Self {
-        Self {
-            query_stage_ids,
-            schema,
-            partition_count,
-        }
-    }
-}
-
-#[async_trait]
-impl ExecutionPlan for UnresolvedShuffleExec {
-    fn as_any(&self) -> &dyn Any {
-        self
-    }
-
-    fn schema(&self) -> SchemaRef {
-        self.schema.clone()
-    }
-
-    fn output_partitioning(&self) -> Partitioning {
-        Partitioning::UnknownPartitioning(self.partition_count)
-    }
-
-    fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
-        vec![]
-    }
-
-    fn with_new_children(
-        &self,
-        _children: Vec<Arc<dyn ExecutionPlan>>,
-    ) -> Result<Arc<dyn ExecutionPlan>> {
-        Err(DataFusionError::Plan(
-            "Ballista UnresolvedShuffleExec does not support with_new_children()"
-                .to_owned(),
-        ))
-    }
-
-    async fn execute(
-        &self,
-        _partition: usize,
-    ) -> Result<Pin<Box<dyn RecordBatchStream + Send + Sync>>> {
-        Err(DataFusionError::Plan(
-            "Ballista UnresolvedShuffleExec does not support execution".to_owned(),
-        ))
-    }
-}
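
(A sketch of the "signal" usage the doc comment describes: a scheduler can
walk a plan tree and refuse to launch anything that still contains this node.
The helper name is hypothetical.)

    use datafusion::physical_plan::ExecutionPlan;

    fn has_unresolved_shuffle(plan: &dyn ExecutionPlan) -> bool {
        plan.as_any().downcast_ref::<UnresolvedShuffleExec>().is_some()
            || plan
                .children()
                .iter()
                .any(|child| has_unresolved_shuffle(child.as_ref()))
    }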
diff --git a/rust/ballista/rust/core/src/lib.rs b/rust/ballista/rust/core/src/lib.rs
deleted file mode 100644
index 425dbab..0000000
--- a/rust/ballista/rust/core/src/lib.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Ballista Distributed Compute
-#![allow(unused_imports)]
-pub const BALLISTA_VERSION: &str = env!("CARGO_PKG_VERSION");
-
-pub fn print_version() {
-    println!("Ballista version: {}", BALLISTA_VERSION)
-}
-
-pub mod client;
-pub mod datasource;
-pub mod error;
-pub mod execution_plans;
-pub mod memory_stream;
-pub mod utils;
-
-#[macro_use]
-pub mod serde;
diff --git a/rust/ballista/rust/core/src/memory_stream.rs b/rust/ballista/rust/core/src/memory_stream.rs
deleted file mode 100644
index 8bf5e20..0000000
--- a/rust/ballista/rust/core/src/memory_stream.rs
+++ /dev/null
@@ -1,93 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! This is copied from DataFusion because it is declared as `pub(crate)`. See
-//! https://issues.apache.org/jira/browse/ARROW-11276.
-
-use std::task::{Context, Poll};
-
-use arrow::{datatypes::SchemaRef, error::Result, record_batch::RecordBatch};
-use datafusion::physical_plan::RecordBatchStream;
-use futures::Stream;
-
-/// Iterator over batches.
-pub struct MemoryStream {
-    /// Vector of record batches
-    data: Vec<RecordBatch>,
-    /// Schema representing the data
-    schema: SchemaRef,
-    /// Optional projection for which columns to load
-    projection: Option<Vec<usize>>,
-    /// Index into the data
-    index: usize,
-}
-
-impl MemoryStream {
-    /// Create an iterator for a vector of record batches.
-    pub fn try_new(
-        data: Vec<RecordBatch>,
-        schema: SchemaRef,
-        projection: Option<Vec<usize>>,
-    ) -> Result<Self> {
-        Ok(Self {
-            data,
-            schema,
-            projection,
-            index: 0,
-        })
-    }
-}
-
-impl Stream for MemoryStream {
-    type Item = Result<RecordBatch>;
-
-    fn poll_next(
-        mut self: std::pin::Pin<&mut Self>,
-        _: &mut Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
-        Poll::Ready(if self.index < self.data.len() {
-            self.index += 1;
-
-            let batch = &self.data[self.index - 1];
-
-            // apply projection
-            match &self.projection {
-                Some(columns) => Some(RecordBatch::try_new(
-                    self.schema.clone(),
-                    columns.iter().map(|i| batch.column(*i).clone()).collect(),
-                )),
-                None => Some(Ok(batch.clone())),
-            }
-        } else {
-            None
-        })
-    }
-
-    fn size_hint(&self) -> (usize, Option<usize>) {
-        (self.data.len(), Some(self.data.len()))
-    }
-}
-
-impl RecordBatchStream for MemoryStream {
-    /// Get the schema.
-    fn schema(&self) -> SchemaRef {
-        self.schema.clone()
-    }
-}
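
(A usage sketch for the stream above; the input batches and schema are
assumed to exist, and the draining loop mirrors how any RecordBatchStream is
consumed.)

    use arrow::{datatypes::SchemaRef, error::Result, record_batch::RecordBatch};
    use futures::StreamExt;

    async fn replay(
        batches: Vec<RecordBatch>,
        schema: SchemaRef,
    ) -> Result<Vec<RecordBatch>> {
        // `None` projection: yield each batch unchanged, in order
        let mut stream = Box::pin(MemoryStream::try_new(batches, schema, None)?);
        let mut out = Vec::new();
        while let Some(batch) = stream.next().await {
            out.push(batch?);
        }
        Ok(out)
    }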
diff --git a/rust/ballista/rust/core/src/serde/logical_plan/from_proto.rs b/rust/ballista/rust/core/src/serde/logical_plan/from_proto.rs
deleted file mode 100644
index 9308426..0000000
--- a/rust/ballista/rust/core/src/serde/logical_plan/from_proto.rs
+++ /dev/null
@@ -1,1200 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Serde code to convert from protocol buffers to Rust data structures.
-
-use std::{
-    convert::{From, TryInto},
-    unimplemented,
-};
-
-use crate::error::BallistaError;
-use crate::serde::{proto_error, protobuf};
-use crate::{convert_box_required, convert_required};
-
-use arrow::datatypes::{DataType, Field, Schema};
-use datafusion::logical_plan::{
-    abs, acos, asin, atan, ceil, cos, exp, floor, log10, log2, round, signum, sin, sqrt,
-    tan, trunc, Expr, JoinType, LogicalPlan, LogicalPlanBuilder, Operator,
-};
-use datafusion::physical_plan::aggregates::AggregateFunction;
-use datafusion::physical_plan::csv::CsvReadOptions;
-use datafusion::scalar::ScalarValue;
-use protobuf::logical_plan_node::LogicalPlanType;
-use protobuf::{logical_expr_node::ExprType, scalar_type};
-
-// use uuid::Uuid;
-
-impl TryInto<LogicalPlan> for &protobuf::LogicalPlanNode {
-    type Error = BallistaError;
-
-    fn try_into(self) -> Result<LogicalPlan, Self::Error> {
-        let plan = self.logical_plan_type.as_ref().ok_or_else(|| {
-            proto_error(format!(
-                "logical_plan::from_proto() Unsupported logical plan '{:?}'",
-                self
-            ))
-        })?;
-        match plan {
-            LogicalPlanType::Projection(projection) => {
-                let input: LogicalPlan = convert_box_required!(projection.input)?;
-                let x: Vec<Expr> = projection
-                    .expr
-                    .iter()
-                    .map(|expr| expr.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-                LogicalPlanBuilder::from(&input)
-                    .project(x)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::Selection(selection) => {
-                let input: LogicalPlan = convert_box_required!(selection.input)?;
-                LogicalPlanBuilder::from(&input)
-                    .filter(
-                        selection
-                            .expr
-                            .as_ref()
-                            .expect("expression required")
-                            .try_into()?,
-                    )?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::Aggregate(aggregate) => {
-                let input: LogicalPlan = convert_box_required!(aggregate.input)?;
-                let group_expr = aggregate
-                    .group_expr
-                    .iter()
-                    .map(|expr| expr.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-                let aggr_expr = aggregate
-                    .aggr_expr
-                    .iter()
-                    .map(|expr| expr.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-                LogicalPlanBuilder::from(&input)
-                    .aggregate(group_expr, aggr_expr)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::CsvScan(scan) => {
-                let schema: Schema = convert_required!(scan.schema)?;
-                let options = CsvReadOptions::new()
-                    .schema(&schema)
-                    .delimiter(scan.delimiter.as_bytes()[0])
-                    .file_extension(&scan.file_extension)
-                    .has_header(scan.has_header);
-
-                let mut projection = None;
-                if let Some(column_names) = &scan.projection {
-                    let column_indices = column_names
-                        .columns
-                        .iter()
-                        .map(|name| schema.index_of(name))
-                        .collect::<Result<Vec<usize>, _>>()?;
-                    projection = Some(column_indices);
-                }
-
-                LogicalPlanBuilder::scan_csv(&scan.path, options, projection)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::ParquetScan(scan) => {
-                let projection = match scan.projection.as_ref() {
-                    None => None,
-                    Some(columns) => {
-                        let schema: Schema = convert_required!(scan.schema)?;
-                        let r: Result<Vec<usize>, _> = columns
-                            .columns
-                            .iter()
-                            .map(|col_name| {
-                                schema.fields().iter().position(|field| field.name() == col_name).ok_or_else(|| {
-                                    let column_names: Vec<&String> = schema.fields().iter().map(|f| f.name()).collect();
-                                    proto_error(format!(
-                                        "Parquet projection contains column name that is not present in schema. Column name: {}. Schema columns: {:?}",
-                                        col_name, column_names
-                                    ))
-                                })
-                            })
-                            .collect();
-                        Some(r?)
-                    }
-                };
-                LogicalPlanBuilder::scan_parquet(&scan.path, projection, 24)? //TODO concurrency
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::Sort(sort) => {
-                let input: LogicalPlan = convert_box_required!(sort.input)?;
-                let sort_expr: Vec<Expr> = sort
-                    .expr
-                    .iter()
-                    .map(|expr| expr.try_into())
-                    .collect::<Result<Vec<Expr>, _>>()?;
-                LogicalPlanBuilder::from(&input)
-                    .sort(sort_expr)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::Repartition(repartition) => {
-                use datafusion::logical_plan::Partitioning;
-                let input: LogicalPlan = convert_box_required!(repartition.input)?;
-                use protobuf::repartition_node::PartitionMethod;
-                let pb_partition_method = repartition.partition_method.clone().ok_or_else(|| {
-                    BallistaError::General(String::from(
-                        "Protobuf deserialization error, RepartitionNode was missing required field 'partition_method'",
-                    ))
-                })?;
-
-                let partitioning_scheme = match pb_partition_method {
-                    PartitionMethod::Hash(protobuf::HashRepartition {
-                        hash_expr: pb_hash_expr,
-                        partition_count,
-                    }) => Partitioning::Hash(
-                        pb_hash_expr
-                            .iter()
-                            .map(|pb_expr| pb_expr.try_into())
-                            .collect::<Result<Vec<_>, _>>()?,
-                        partition_count as usize,
-                    ),
-                    PartitionMethod::RoundRobin(batch_size) => {
-                        Partitioning::RoundRobinBatch(batch_size as usize)
-                    }
-                };
-
-                LogicalPlanBuilder::from(&input)
-                    .repartition(partitioning_scheme)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::EmptyRelation(empty_relation) => {
-                LogicalPlanBuilder::empty(empty_relation.produce_one_row)
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::CreateExternalTable(create_extern_table) => {
-                let pb_schema = (create_extern_table.schema.clone()).ok_or_else(|| {
-                    BallistaError::General(String::from(
-                        "Protobuf deserialization error, CreateExternalTableNode was missing required field schema.",
-                    ))
-                })?;
-
-                let pb_file_type: protobuf::FileType =
-                    create_extern_table.file_type.try_into()?;
-
-                Ok(LogicalPlan::CreateExternalTable {
-                    schema: pb_schema.try_into()?,
-                    name: create_extern_table.name.clone(),
-                    location: create_extern_table.location.clone(),
-                    file_type: pb_file_type.into(),
-                    has_header: create_extern_table.has_header,
-                })
-            }
-            LogicalPlanType::Explain(explain) => {
-                let input: LogicalPlan = convert_box_required!(explain.input)?;
-                LogicalPlanBuilder::from(&input)
-                    .explain(explain.verbose)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::Limit(limit) => {
-                let input: LogicalPlan = convert_box_required!(limit.input)?;
-                LogicalPlanBuilder::from(&input)
-                    .limit(limit.limit as usize)?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-            LogicalPlanType::Join(join) => {
-                let left_keys: Vec<&str> =
-                    join.left_join_column.iter().map(|i| i.as_str()).collect();
-                let right_keys: Vec<&str> =
-                    join.right_join_column.iter().map(|i| i.as_str()).collect();
-                let join_type =
-                    protobuf::JoinType::from_i32(join.join_type).ok_or_else(|| {
-                        proto_error(format!(
-                            "Received a JoinNode message with unknown JoinType {}",
-                            join.join_type
-                        ))
-                    })?;
-                let join_type = match join_type {
-                    protobuf::JoinType::Inner => JoinType::Inner,
-                    protobuf::JoinType::Left => JoinType::Left,
-                    protobuf::JoinType::Right => JoinType::Right,
-                };
-                LogicalPlanBuilder::from(&convert_box_required!(join.left)?)
-                    .join(
-                        &convert_box_required!(join.right)?,
-                        join_type,
-                        &left_keys,
-                        &right_keys,
-                    )?
-                    .build()
-                    .map_err(|e| e.into())
-            }
-        }
-    }
-}
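
For context, each arm above bottoms out in ordinary LogicalPlanBuilder calls once the protobuf payload is decoded. As a minimal sketch (not part of the deleted file), this is what the Sort arm reduces to for a single already-decoded sort expression, assuming the DataFusion API of this era:

    use datafusion::logical_plan::{LogicalPlan, LogicalPlanBuilder};
    use datafusion::prelude::col;

    // Hypothetical helper: equivalent to the Sort arm once `input` and the
    // sort expressions have been deserialized.
    fn sort_plan(input: &LogicalPlan) -> datafusion::error::Result<LogicalPlan> {
        LogicalPlanBuilder::from(input)
            .sort(vec![col("salary")])?
            .build()
    }
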
-
-impl TryInto<datafusion::logical_plan::DFSchema> for protobuf::Schema {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<datafusion::logical_plan::DFSchema, Self::Error> {
-        let schema: Schema = (&self).try_into()?;
-        schema.try_into().map_err(BallistaError::DataFusionError)
-    }
-}
-
-impl TryInto<datafusion::logical_plan::DFSchemaRef> for protobuf::Schema {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<datafusion::logical_plan::DFSchemaRef, Self::Error> {
-        use datafusion::logical_plan::ToDFSchema;
-        let schema: Schema = (&self).try_into()?;
-        schema
-            .to_dfschema_ref()
-            .map_err(BallistaError::DataFusionError)
-    }
-}
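
Both schema conversions above go through the plain Arrow Schema first; the second hop is DataFusion's ToDFSchema extension trait. A small sketch of that hop in isolation, assuming the API of this era:

    use arrow::datatypes::{DataType, Field, Schema};
    use datafusion::logical_plan::ToDFSchema;

    // Lift an Arrow schema into the DFSchemaRef that logical plans carry.
    fn demo() -> datafusion::error::Result<datafusion::logical_plan::DFSchemaRef> {
        let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
        schema.to_dfschema_ref()
    }
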
-
-impl TryInto<arrow::datatypes::DataType> for &protobuf::scalar_type::Datatype {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::DataType, Self::Error> {
-        use protobuf::scalar_type::Datatype;
-        Ok(match self {
-            Datatype::Scalar(scalar_type) => {
-                let pb_scalar_enum = protobuf::PrimitiveScalarType::from_i32(*scalar_type).ok_or_else(|| {
-                    proto_error(format!(
-                        "Protobuf deserialization error, scalar_type::Datatype missing was provided invalid enum variant: {}",
-                        *scalar_type
-                    ))
-                })?;
-                pb_scalar_enum.into()
-            }
-            Datatype::List(protobuf::ScalarListType {
-                deepest_type,
-                field_names,
-            }) => {
-                if field_names.is_empty() {
-                    return Err(proto_error(
-                        "Protobuf deserialization error: found no field names in ScalarListType message which requires at least one",
-                    ));
-                }
-                let pb_scalar_type = protobuf::PrimitiveScalarType::from_i32(
-                    *deepest_type,
-                )
-                .ok_or_else(|| {
-                    proto_error(format!(
-                        "Protobuf deserialization error: invalid i32 for scalar enum: {}",
-                        *deepest_type
-                    ))
-                })?;
-                // Length is checked above, so it is safe to unwrap .last()
-                let mut scalar_type =
-                    arrow::datatypes::DataType::List(Box::new(Field::new(
-                        field_names.last().unwrap().as_str(),
-                        pb_scalar_type.into(),
-                        true,
-                    )));
-                // Iterate over the field names in reverse order, skipping the last item
-                for name in field_names.iter().rev().skip(1) {
-                    let new_datatype = arrow::datatypes::DataType::List(Box::new(
-                        Field::new(name.as_str(), scalar_type, true),
-                    ));
-                    scalar_type = new_datatype;
-                }
-                scalar_type
-            }
-        })
-    }
-}
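
The List arm above builds the nested type inside-out: the last field name wraps the deepest scalar type, and each earlier name adds another List layer. The same loop as a standalone sketch (the helper name is illustrative, not from the deleted file):

    use arrow::datatypes::{DataType, Field};

    // ["a", "b", "c"] with Int32 yields List(a: List(b: List(c: Int32))).
    fn nested_list(field_names: &[&str], deepest: DataType) -> DataType {
        let mut ty = deepest;
        for name in field_names.iter().rev() {
            ty = DataType::List(Box::new(Field::new(name, ty, true)));
        }
        ty
    }
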
-
-impl TryInto<arrow::datatypes::DataType> for &protobuf::arrow_type::ArrowTypeEnum {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::DataType, Self::Error> {
-        use arrow::datatypes::DataType;
-        use protobuf::arrow_type;
-        Ok(match self {
-            arrow_type::ArrowTypeEnum::None(_) => DataType::Null,
-            arrow_type::ArrowTypeEnum::Bool(_) => DataType::Boolean,
-            arrow_type::ArrowTypeEnum::Uint8(_) => DataType::UInt8,
-            arrow_type::ArrowTypeEnum::Int8(_) => DataType::Int8,
-            arrow_type::ArrowTypeEnum::Uint16(_) => DataType::UInt16,
-            arrow_type::ArrowTypeEnum::Int16(_) => DataType::Int16,
-            arrow_type::ArrowTypeEnum::Uint32(_) => DataType::UInt32,
-            arrow_type::ArrowTypeEnum::Int32(_) => DataType::Int32,
-            arrow_type::ArrowTypeEnum::Uint64(_) => DataType::UInt64,
-            arrow_type::ArrowTypeEnum::Int64(_) => DataType::Int64,
-            arrow_type::ArrowTypeEnum::Float16(_) => DataType::Float16,
-            arrow_type::ArrowTypeEnum::Float32(_) => DataType::Float32,
-            arrow_type::ArrowTypeEnum::Float64(_) => DataType::Float64,
-            arrow_type::ArrowTypeEnum::Utf8(_) => DataType::Utf8,
-            arrow_type::ArrowTypeEnum::LargeUtf8(_) => DataType::LargeUtf8,
-            arrow_type::ArrowTypeEnum::Binary(_) => DataType::Binary,
-            arrow_type::ArrowTypeEnum::FixedSizeBinary(size) => {
-                DataType::FixedSizeBinary(*size)
-            }
-            arrow_type::ArrowTypeEnum::LargeBinary(_) => DataType::LargeBinary,
-            arrow_type::ArrowTypeEnum::Date32(_) => DataType::Date32,
-            arrow_type::ArrowTypeEnum::Date64(_) => DataType::Date64,
-            arrow_type::ArrowTypeEnum::Duration(time_unit) => {
-                DataType::Duration(protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?)
-            }
-            arrow_type::ArrowTypeEnum::Timestamp(protobuf::Timestamp {
-                time_unit,
-                timezone,
-            }) => DataType::Timestamp(
-                protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?,
-                match timezone.len() {
-                    0 => None,
-                    _ => Some(timezone.to_owned()),
-                },
-            ),
-            arrow_type::ArrowTypeEnum::Time32(time_unit) => {
-                DataType::Time32(protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?)
-            }
-            arrow_type::ArrowTypeEnum::Time64(time_unit) => {
-                DataType::Time64(protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?)
-            }
-            arrow_type::ArrowTypeEnum::Interval(interval_unit) => DataType::Interval(
-                protobuf::IntervalUnit::from_i32_to_arrow(*interval_unit)?,
-            ),
-            arrow_type::ArrowTypeEnum::Decimal(protobuf::Decimal {
-                whole,
-                fractional,
-            }) => DataType::Decimal(*whole as usize, *fractional as usize),
-            arrow_type::ArrowTypeEnum::List(list) => {
-                let list_type: &protobuf::Field = list
-                    .as_ref()
-                    .field_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: List message missing required field 'field_type'"))?
-                    .as_ref();
-                DataType::List(Box::new(list_type.try_into()?))
-            }
-            arrow_type::ArrowTypeEnum::LargeList(list) => {
-                let list_type: &protobuf::Field = list
-                    .as_ref()
-                    .field_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: List message missing required field 'field_type'"))?
-                    .as_ref();
-                DataType::LargeList(Box::new(list_type.try_into()?))
-            }
-            arrow_type::ArrowTypeEnum::FixedSizeList(list) => {
-                let list_type: &protobuf::Field = list
-                    .as_ref()
-                    .field_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: List message missing required field 'field_type'"))?
-                    .as_ref();
-                let list_size = list.list_size;
-                DataType::FixedSizeList(Box::new(list_type.try_into()?), list_size)
-            }
-            arrow_type::ArrowTypeEnum::Struct(strct) => DataType::Struct(
-                strct
-                    .sub_field_types
-                    .iter()
-                    .map(|field| field.try_into())
-                    .collect::<Result<Vec<_>, _>>()?,
-            ),
-            arrow_type::ArrowTypeEnum::Union(union) => DataType::Union(
-                union
-                    .union_types
-                    .iter()
-                    .map(|field| field.try_into())
-                    .collect::<Result<Vec<_>, _>>()?,
-            ),
-            arrow_type::ArrowTypeEnum::Dictionary(dict) => {
-                let pb_key_datatype = dict
-                    .as_ref()
-                    .key
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: Dictionary message missing required field 'key'"))?;
-                let pb_value_datatype = dict
-                    .as_ref()
-                    .value
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: Dictionary message missing required field 'key'"))?;
-                let key_datatype: DataType = pb_key_datatype.as_ref().try_into()?;
-                let value_datatype: DataType = pb_value_datatype.as_ref().try_into()?;
-                DataType::Dictionary(Box::new(key_datatype), Box::new(value_datatype))
-            }
-        })
-    }
-}
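
One convention worth noting from the Timestamp arm above: proto3 strings cannot be absent, so an empty timezone string stands in for "no timezone" and decodes to None on the Arrow side. The rule in isolation:

    // Empty string on the wire means no timezone on the Arrow side.
    fn timezone_from_proto(tz: &str) -> Option<String> {
        if tz.is_empty() { None } else { Some(tz.to_owned()) }
    }
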
-
-impl Into<arrow::datatypes::DataType> for protobuf::PrimitiveScalarType {
-    fn into(self) -> arrow::datatypes::DataType {
-        use arrow::datatypes::DataType;
-        match self {
-            protobuf::PrimitiveScalarType::Bool => DataType::Boolean,
-            protobuf::PrimitiveScalarType::Uint8 => DataType::UInt8,
-            protobuf::PrimitiveScalarType::Int8 => DataType::Int8,
-            protobuf::PrimitiveScalarType::Uint16 => DataType::UInt16,
-            protobuf::PrimitiveScalarType::Int16 => DataType::Int16,
-            protobuf::PrimitiveScalarType::Uint32 => DataType::UInt32,
-            protobuf::PrimitiveScalarType::Int32 => DataType::Int32,
-            protobuf::PrimitiveScalarType::Uint64 => DataType::UInt64,
-            protobuf::PrimitiveScalarType::Int64 => DataType::Int64,
-            protobuf::PrimitiveScalarType::Float32 => DataType::Float32,
-            protobuf::PrimitiveScalarType::Float64 => DataType::Float64,
-            protobuf::PrimitiveScalarType::Utf8 => DataType::Utf8,
-            protobuf::PrimitiveScalarType::LargeUtf8 => DataType::LargeUtf8,
-            protobuf::PrimitiveScalarType::Date32 => DataType::Date32,
-            protobuf::PrimitiveScalarType::TimeMicrosecond => {
-                DataType::Time64(arrow::datatypes::TimeUnit::Microsecond)
-            }
-            protobuf::PrimitiveScalarType::TimeNanosecond => {
-                DataType::Time64(arrow::datatypes::TimeUnit::Nanosecond)
-            }
-            protobuf::PrimitiveScalarType::Null => DataType::Null,
-        }
-    }
-}
-
-// Does not typecheck lists
-fn typechecked_scalar_value_conversion(
-    tested_type: &protobuf::scalar_value::Value,
-    required_type: protobuf::PrimitiveScalarType,
-) -> Result<datafusion::scalar::ScalarValue, BallistaError> {
-    use protobuf::scalar_value::Value;
-    use protobuf::PrimitiveScalarType;
-    Ok(match (tested_type, &required_type) {
-        (Value::BoolValue(v), PrimitiveScalarType::Bool) => {
-            ScalarValue::Boolean(Some(*v))
-        }
-        (Value::Int8Value(v), PrimitiveScalarType::Int8) => {
-            ScalarValue::Int8(Some(*v as i8))
-        }
-        (Value::Int16Value(v), PrimitiveScalarType::Int16) => {
-            ScalarValue::Int16(Some(*v as i16))
-        }
-        (Value::Int32Value(v), PrimitiveScalarType::Int32) => {
-            ScalarValue::Int32(Some(*v))
-        }
-        (Value::Int64Value(v), PrimitiveScalarType::Int64) => {
-            ScalarValue::Int64(Some(*v))
-        }
-        (Value::Uint8Value(v), PrimitiveScalarType::Uint8) => {
-            ScalarValue::UInt8(Some(*v as u8))
-        }
-        (Value::Uint16Value(v), PrimitiveScalarType::Uint16) => {
-            ScalarValue::UInt16(Some(*v as u16))
-        }
-        (Value::Uint32Value(v), PrimitiveScalarType::Uint32) => {
-            ScalarValue::UInt32(Some(*v))
-        }
-        (Value::Uint64Value(v), PrimitiveScalarType::Uint64) => {
-            ScalarValue::UInt64(Some(*v))
-        }
-        (Value::Float32Value(v), PrimitiveScalarType::Float32) => {
-            ScalarValue::Float32(Some(*v))
-        }
-        (Value::Float64Value(v), PrimitiveScalarType::Float64) => {
-            ScalarValue::Float64(Some(*v))
-        }
-        (Value::Date32Value(v), PrimitiveScalarType::Date32) => {
-            ScalarValue::Date32(Some(*v))
-        }
-        (Value::TimeMicrosecondValue(v), PrimitiveScalarType::TimeMicrosecond) => {
-            ScalarValue::TimestampMicrosecond(Some(*v))
-        }
-        (Value::TimeNanosecondValue(v), PrimitiveScalarType::TimeNanosecond) => {
-            ScalarValue::TimestampNanosecond(Some(*v))
-        }
-        (Value::Utf8Value(v), PrimitiveScalarType::Utf8) => {
-            ScalarValue::Utf8(Some(v.to_owned()))
-        }
-        (Value::LargeUtf8Value(v), PrimitiveScalarType::LargeUtf8) => {
-            ScalarValue::LargeUtf8(Some(v.to_owned()))
-        }
-
-        (Value::NullValue(i32_enum), required_scalar_type) => {
-            if *i32_enum == *required_scalar_type as i32 {
-                let pb_scalar_type = PrimitiveScalarType::from_i32(*i32_enum).ok_or_else(|| {
-                    BallistaError::General(format!(
-                        "Invalid i32_enum={} when converting with PrimitiveScalarType::from_i32()",
-                        *i32_enum
-                    ))
-                })?;
-                let scalar_value: ScalarValue = match pb_scalar_type {
-                    PrimitiveScalarType::Bool => ScalarValue::Boolean(None),
-                    PrimitiveScalarType::Uint8 => ScalarValue::UInt8(None),
-                    PrimitiveScalarType::Int8 => ScalarValue::Int8(None),
-                    PrimitiveScalarType::Uint16 => ScalarValue::UInt16(None),
-                    PrimitiveScalarType::Int16 => ScalarValue::Int16(None),
-                    PrimitiveScalarType::Uint32 => ScalarValue::UInt32(None),
-                    PrimitiveScalarType::Int32 => ScalarValue::Int32(None),
-                    PrimitiveScalarType::Uint64 => ScalarValue::UInt64(None),
-                    PrimitiveScalarType::Int64 => ScalarValue::Int64(None),
-                    PrimitiveScalarType::Float32 => ScalarValue::Float32(None),
-                    PrimitiveScalarType::Float64 => ScalarValue::Float64(None),
-                    PrimitiveScalarType::Utf8 => ScalarValue::Utf8(None),
-                    PrimitiveScalarType::LargeUtf8 => ScalarValue::LargeUtf8(None),
-                    PrimitiveScalarType::Date32 => ScalarValue::Date32(None),
-                    PrimitiveScalarType::TimeMicrosecond => {
-                        ScalarValue::TimestampMicrosecond(None)
-                    }
-                    PrimitiveScalarType::TimeNanosecond => {
-                        ScalarValue::TimestampNanosecond(None)
-                    }
-                    PrimitiveScalarType::Null => {
-                        return Err(proto_error(
-                            "Untyped scalar null is not a valid scalar value",
-                        ))
-                    }
-                };
-                scalar_value
-            } else {
-                return Err(proto_error("Could not convert to the proper type"));
-            }
-        }
-        _ => return Err(proto_error("Could not convert to the proper type")),
-    })
-}
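
A usage sketch for the typechecked conversion above, assuming the generated `protobuf` types and this crate's imports are in scope; a payload that disagrees with the required type is rejected rather than coerced:

    use protobuf::{scalar_value::Value, PrimitiveScalarType};

    // Illustrative check, not part of the deleted file.
    fn demo() -> Result<(), BallistaError> {
        // Matching pair: decodes to ScalarValue::Int32(Some(7)).
        let ok = typechecked_scalar_value_conversion(
            &Value::Int32Value(7),
            PrimitiveScalarType::Int32,
        )?;
        assert_eq!(ok, ScalarValue::Int32(Some(7)));

        // Mismatched pair: an Int32 payload checked against Utf8 is an error.
        assert!(typechecked_scalar_value_conversion(
            &Value::Int32Value(7),
            PrimitiveScalarType::Utf8,
        )
        .is_err());
        Ok(())
    }
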
-
-impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::scalar_value::Value {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> {
-        use datafusion::scalar::ScalarValue;
-        use protobuf::PrimitiveScalarType;
-        let scalar = match self {
-            protobuf::scalar_value::Value::BoolValue(v) => ScalarValue::Boolean(Some(*v)),
-            protobuf::scalar_value::Value::Utf8Value(v) => {
-                ScalarValue::Utf8(Some(v.to_owned()))
-            }
-            protobuf::scalar_value::Value::LargeUtf8Value(v) => {
-                ScalarValue::LargeUtf8(Some(v.to_owned()))
-            }
-            protobuf::scalar_value::Value::Int8Value(v) => {
-                ScalarValue::Int8(Some(*v as i8))
-            }
-            protobuf::scalar_value::Value::Int16Value(v) => {
-                ScalarValue::Int16(Some(*v as i16))
-            }
-            protobuf::scalar_value::Value::Int32Value(v) => ScalarValue::Int32(Some(*v)),
-            protobuf::scalar_value::Value::Int64Value(v) => ScalarValue::Int64(Some(*v)),
-            protobuf::scalar_value::Value::Uint8Value(v) => {
-                ScalarValue::UInt8(Some(*v as u8))
-            }
-            protobuf::scalar_value::Value::Uint16Value(v) => {
-                ScalarValue::UInt16(Some(*v as u16))
-            }
-            protobuf::scalar_value::Value::Uint32Value(v) => {
-                ScalarValue::UInt32(Some(*v))
-            }
-            protobuf::scalar_value::Value::Uint64Value(v) => {
-                ScalarValue::UInt64(Some(*v))
-            }
-            protobuf::scalar_value::Value::Float32Value(v) => {
-                ScalarValue::Float32(Some(*v))
-            }
-            protobuf::scalar_value::Value::Float64Value(v) => {
-                ScalarValue::Float64(Some(*v))
-            }
-            protobuf::scalar_value::Value::Date32Value(v) => {
-                ScalarValue::Date32(Some(*v))
-            }
-            protobuf::scalar_value::Value::TimeMicrosecondValue(v) => {
-                ScalarValue::TimestampMicrosecond(Some(*v))
-            }
-            protobuf::scalar_value::Value::TimeNanosecondValue(v) => {
-                ScalarValue::TimestampNanosecond(Some(*v))
-            }
-            protobuf::scalar_value::Value::ListValue(v) => v.try_into()?,
-            protobuf::scalar_value::Value::NullListValue(v) => {
-                ScalarValue::List(None, v.try_into()?)
-            }
-            protobuf::scalar_value::Value::NullValue(null_enum) => {
-                PrimitiveScalarType::from_i32(*null_enum)
-                    .ok_or_else(|| proto_error("Invalid scalar type"))?
-                    .try_into()?
-            }
-        };
-        Ok(scalar)
-    }
-}
-
-impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::ScalarListValue {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> {
-        use protobuf::scalar_type::Datatype;
-        use protobuf::PrimitiveScalarType;
-        let protobuf::ScalarListValue { datatype, values } = self;
-        let pb_scalar_type = datatype
-            .as_ref()
-            .ok_or_else(|| proto_error("Protobuf deserialization error: ScalarListValue messsage missing required field 'datatype'"))?;
-        let scalar_type = pb_scalar_type
-            .datatype
-            .as_ref()
-            .ok_or_else(|| proto_error("Protobuf deserialization error: ScalarListValue.Datatype messsage missing required field 'datatype'"))?;
-        let scalar_values = match scalar_type {
-            Datatype::Scalar(scalar_type_i32) => {
-                let leaf_scalar_type =
-                    protobuf::PrimitiveScalarType::from_i32(*scalar_type_i32)
-                        .ok_or_else(|| {
-                            proto_error("Error converting i32 to basic scalar type")
-                        })?;
-                let typechecked_values: Vec<datafusion::scalar::ScalarValue> = values
-                    .iter()
-                    .map(|protobuf::ScalarValue { value: opt_value }| {
-                        let value = opt_value.as_ref().ok_or_else(|| {
-                            proto_error(
-                                "Protobuf deserialization error: missing required field 'value'",
-                            )
-                        })?;
-                        typechecked_scalar_value_conversion(value, leaf_scalar_type)
-                    })
-                    .collect::<Result<Vec<_>, _>>()?;
-                datafusion::scalar::ScalarValue::List(
-                    Some(typechecked_values),
-                    leaf_scalar_type.into(),
-                )
-            }
-            Datatype::List(list_type) => {
-                let protobuf::ScalarListType {
-                    deepest_type,
-                    field_names,
-                } = &list_type;
-                let leaf_type =
-                    PrimitiveScalarType::from_i32(*deepest_type).ok_or_else(|| {
-                        proto_error("Error converting i32 to basic scalar type")
-                    })?;
-                let depth = field_names.len();
-
-                let typechecked_values: Vec<datafusion::scalar::ScalarValue> = if depth
-                    == 0
-                {
-                    return Err(proto_error(
-                        "Protobuf deserialization error, ScalarListType had no field names, requires at least one",
-                    ));
-                } else if depth == 1 {
-                    values
-                        .iter()
-                        .map(|protobuf::ScalarValue { value: opt_value }| {
-                            let value = opt_value
-                                .as_ref()
-                                .ok_or_else(|| proto_error("Protobuf deserialization error: missing required field 'value'"))?;
-                            typechecked_scalar_value_conversion(value, leaf_type)
-                        })
-                        .collect::<Result<Vec<_>, _>>()?
-                } else {
-                    values
-                        .iter()
-                        .map(|protobuf::ScalarValue { value: opt_value }| {
-                            let value = opt_value
-                                .as_ref()
-                                .ok_or_else(|| proto_error("Protobuf deserialization error: missing required field 'value'"))?;
-                            value.try_into()
-                        })
-                        .collect::<Result<Vec<_>, _>>()?
-                };
-                datafusion::scalar::ScalarValue::List(
-                    match typechecked_values.len() {
-                        0 => None,
-                        _ => Some(typechecked_values),
-                    },
-                    list_type.try_into()?,
-                )
-            }
-        };
-        Ok(scalar_values)
-    }
-}
-
-impl TryInto<arrow::datatypes::DataType> for &protobuf::ScalarListType {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::DataType, Self::Error> {
-        use protobuf::PrimitiveScalarType;
-        let protobuf::ScalarListType {
-            deepest_type,
-            field_names,
-        } = self;
-
-        let depth = field_names.len();
-        if depth == 0 {
-            return Err(proto_error(
-                "Protobuf deserialization error: Found a ScalarListType message with no field names, at least one is required",
-            ));
-        }
-
-        let mut curr_type = arrow::datatypes::DataType::List(Box::new(Field::new(
-            // The vector is checked as non-empty above, so this unwrap is safe
-            field_names.last().unwrap(),
-            PrimitiveScalarType::from_i32(*deepest_type)
-                .ok_or_else(|| {
-                    proto_error("Could not convert to datafusion scalar type")
-                })?
-                .into(),
-            true,
-        )));
-        // Iterate over the field names in reverse order, skipping the last item
-        for name in field_names.iter().rev().skip(1) {
-            let temp_curr_type = arrow::datatypes::DataType::List(Box::new(Field::new(
-                name, curr_type, true,
-            )));
-            curr_type = temp_curr_type;
-        }
-        Ok(curr_type)
-    }
-}
-
-impl TryInto<datafusion::scalar::ScalarValue> for protobuf::PrimitiveScalarType {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> {
-        use datafusion::scalar::ScalarValue;
-        Ok(match self {
-            protobuf::PrimitiveScalarType::Null => {
-                return Err(proto_error("Untyped null is an invalid scalar value"))
-            }
-            protobuf::PrimitiveScalarType::Bool => ScalarValue::Boolean(None),
-            protobuf::PrimitiveScalarType::Uint8 => ScalarValue::UInt8(None),
-            protobuf::PrimitiveScalarType::Int8 => ScalarValue::Int8(None),
-            protobuf::PrimitiveScalarType::Uint16 => ScalarValue::UInt16(None),
-            protobuf::PrimitiveScalarType::Int16 => ScalarValue::Int16(None),
-            protobuf::PrimitiveScalarType::Uint32 => ScalarValue::UInt32(None),
-            protobuf::PrimitiveScalarType::Int32 => ScalarValue::Int32(None),
-            protobuf::PrimitiveScalarType::Uint64 => ScalarValue::UInt64(None),
-            protobuf::PrimitiveScalarType::Int64 => ScalarValue::Int64(None),
-            protobuf::PrimitiveScalarType::Float32 => ScalarValue::Float32(None),
-            protobuf::PrimitiveScalarType::Float64 => ScalarValue::Float64(None),
-            protobuf::PrimitiveScalarType::Utf8 => ScalarValue::Utf8(None),
-            protobuf::PrimitiveScalarType::LargeUtf8 => ScalarValue::LargeUtf8(None),
-            protobuf::PrimitiveScalarType::Date32 => ScalarValue::Date32(None),
-            protobuf::PrimitiveScalarType::TimeMicrosecond => {
-                ScalarValue::TimestampMicrosecond(None)
-            }
-            protobuf::PrimitiveScalarType::TimeNanosecond => {
-                ScalarValue::TimestampNanosecond(None)
-            }
-        })
-    }
-}
-
-impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::ScalarValue {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> {
-        let value = self.value.as_ref().ok_or_else(|| {
-            proto_error("Protobuf deserialization error: missing required field 'value'")
-        })?;
-        Ok(match value {
-            protobuf::scalar_value::Value::BoolValue(v) => ScalarValue::Boolean(Some(*v)),
-            protobuf::scalar_value::Value::Utf8Value(v) => {
-                ScalarValue::Utf8(Some(v.to_owned()))
-            }
-            protobuf::scalar_value::Value::LargeUtf8Value(v) => {
-                ScalarValue::LargeUtf8(Some(v.to_owned()))
-            }
-            protobuf::scalar_value::Value::Int8Value(v) => {
-                ScalarValue::Int8(Some(*v as i8))
-            }
-            protobuf::scalar_value::Value::Int16Value(v) => {
-                ScalarValue::Int16(Some(*v as i16))
-            }
-            protobuf::scalar_value::Value::Int32Value(v) => ScalarValue::Int32(Some(*v)),
-            protobuf::scalar_value::Value::Int64Value(v) => ScalarValue::Int64(Some(*v)),
-            protobuf::scalar_value::Value::Uint8Value(v) => {
-                ScalarValue::UInt8(Some(*v as u8))
-            }
-            protobuf::scalar_value::Value::Uint16Value(v) => {
-                ScalarValue::UInt16(Some(*v as u16))
-            }
-            protobuf::scalar_value::Value::Uint32Value(v) => {
-                ScalarValue::UInt32(Some(*v))
-            }
-            protobuf::scalar_value::Value::Uint64Value(v) => {
-                ScalarValue::UInt64(Some(*v))
-            }
-            protobuf::scalar_value::Value::Float32Value(v) => {
-                ScalarValue::Float32(Some(*v))
-            }
-            protobuf::scalar_value::Value::Float64Value(v) => {
-                ScalarValue::Float64(Some(*v))
-            }
-            protobuf::scalar_value::Value::Date32Value(v) => {
-                ScalarValue::Date32(Some(*v))
-            }
-            protobuf::scalar_value::Value::TimeMicrosecondValue(v) => {
-                ScalarValue::TimestampMicrosecond(Some(*v))
-            }
-            protobuf::scalar_value::Value::TimeNanosecondValue(v) => {
-                ScalarValue::TimestampNanosecond(Some(*v))
-            }
-            protobuf::scalar_value::Value::ListValue(scalar_list) => {
-                let protobuf::ScalarListValue {
-                    values,
-                    datatype: opt_scalar_type,
-                } = &scalar_list;
-                let pb_scalar_type = opt_scalar_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization err: ScalaListValue missing required field 'datatype'"))?;
-                let typechecked_values: Vec<ScalarValue> = values
-                    .iter()
-                    .map(|val| val.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-                let scalar_type: arrow::datatypes::DataType =
-                    pb_scalar_type.try_into()?;
-                ScalarValue::List(Some(typechecked_values), scalar_type)
-            }
-            protobuf::scalar_value::Value::NullListValue(v) => {
-                let pb_datatype = v
-                    .datatype
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: NullListValue message missing required field 'datatyp'"))?;
-                ScalarValue::List(None, pb_datatype.try_into()?)
-            }
-            protobuf::scalar_value::Value::NullValue(v) => {
-                let null_type_enum = protobuf::PrimitiveScalarType::from_i32(*v)
-                    .ok_or_else(|| proto_error("Protobuf deserialization error found invalid enum variant for DatafusionScalar"))?;
-                null_type_enum.try_into()?
-            }
-        })
-    }
-}
-
-impl TryInto<Expr> for &protobuf::LogicalExprNode {
-    type Error = BallistaError;
-
-    fn try_into(self) -> Result<Expr, Self::Error> {
-        use protobuf::logical_expr_node::ExprType;
-
-        let expr_type = self
-            .expr_type
-            .as_ref()
-            .ok_or_else(|| proto_error("Unexpected empty logical expression"))?;
-        match expr_type {
-            ExprType::BinaryExpr(binary_expr) => Ok(Expr::BinaryExpr {
-                left: Box::new(parse_required_expr(&binary_expr.l)?),
-                op: from_proto_binary_op(&binary_expr.op)?,
-                right: Box::new(parse_required_expr(&binary_expr.r)?),
-            }),
-            ExprType::ColumnName(column_name) => Ok(Expr::Column(column_name.to_owned())),
-            ExprType::Literal(literal) => {
-                let scalar_value: datafusion::scalar::ScalarValue = literal.try_into()?;
-                Ok(Expr::Literal(scalar_value))
-            }
-            ExprType::AggregateExpr(expr) => {
-                let aggr_function =
-                    protobuf::AggregateFunction::from_i32(expr.aggr_function)
-                        .ok_or_else(|| {
-                            proto_error(format!(
-                                "Received an unknown aggregate function: {}",
-                                expr.aggr_function
-                            ))
-                        })?;
-                let fun = match aggr_function {
-                    protobuf::AggregateFunction::Min => AggregateFunction::Min,
-                    protobuf::AggregateFunction::Max => AggregateFunction::Max,
-                    protobuf::AggregateFunction::Sum => AggregateFunction::Sum,
-                    protobuf::AggregateFunction::Avg => AggregateFunction::Avg,
-                    protobuf::AggregateFunction::Count => AggregateFunction::Count,
-                };
-
-                Ok(Expr::AggregateFunction {
-                    fun,
-                    args: vec![parse_required_expr(&expr.expr)?],
-                    distinct: false, //TODO
-                })
-            }
-            ExprType::Alias(alias) => Ok(Expr::Alias(
-                Box::new(parse_required_expr(&alias.expr)?),
-                alias.alias.clone(),
-            )),
-            ExprType::IsNullExpr(is_null) => {
-                Ok(Expr::IsNull(Box::new(parse_required_expr(&is_null.expr)?)))
-            }
-            ExprType::IsNotNullExpr(is_not_null) => Ok(Expr::IsNotNull(Box::new(
-                parse_required_expr(&is_not_null.expr)?,
-            ))),
-            ExprType::NotExpr(not) => {
-                Ok(Expr::Not(Box::new(parse_required_expr(&not.expr)?)))
-            }
-            ExprType::Between(between) => Ok(Expr::Between {
-                expr: Box::new(parse_required_expr(&between.expr)?),
-                negated: between.negated,
-                low: Box::new(parse_required_expr(&between.low)?),
-                high: Box::new(parse_required_expr(&between.high)?),
-            }),
-            ExprType::Case(case) => {
-                let when_then_expr = case
-                    .when_then_expr
-                    .iter()
-                    .map(|e| {
-                        Ok((
-                            Box::new(match &e.when_expr {
-                                Some(e) => e.try_into(),
-                                None => Err(proto_error("Missing required expression")),
-                            }?),
-                            Box::new(match &e.then_expr {
-                                Some(e) => e.try_into(),
-                                None => Err(proto_error("Missing required expression")),
-                            }?),
-                        ))
-                    })
-                    .collect::<Result<Vec<(Box<Expr>, Box<Expr>)>, BallistaError>>()?;
-                Ok(Expr::Case {
-                    expr: parse_optional_expr(&case.expr)?.map(Box::new),
-                    when_then_expr,
-                    else_expr: parse_optional_expr(&case.else_expr)?.map(Box::new),
-                })
-            }
-            ExprType::Cast(cast) => {
-                let expr = Box::new(parse_required_expr(&cast.expr)?);
-                let arrow_type: &protobuf::ArrowType = cast
-                    .arrow_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: CastNode message missing required field 'arrow_type'"))?;
-                let data_type = arrow_type.try_into()?;
-                Ok(Expr::Cast { expr, data_type })
-            }
-            ExprType::TryCast(cast) => {
-                let expr = Box::new(parse_required_expr(&cast.expr)?);
-                let arrow_type: &protobuf::ArrowType = cast
-                    .arrow_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: CastNode message missing required field 'arrow_type'"))?;
-                let data_type = arrow_type.try_into()?;
-                Ok(Expr::TryCast { expr, data_type })
-            }
-            ExprType::Sort(sort) => Ok(Expr::Sort {
-                expr: Box::new(parse_required_expr(&sort.expr)?),
-                asc: sort.asc,
-                nulls_first: sort.nulls_first,
-            }),
-            ExprType::Negative(negative) => Ok(Expr::Negative(Box::new(
-                parse_required_expr(&negative.expr)?,
-            ))),
-            ExprType::InList(in_list) => Ok(Expr::InList {
-                expr: Box::new(parse_required_expr(&in_list.expr)?),
-                list: in_list
-                    .list
-                    .iter()
-                    .map(|expr| expr.try_into())
-                    .collect::<Result<Vec<_>, _>>()?,
-                negated: in_list.negated,
-            }),
-            ExprType::Wildcard(_) => Ok(Expr::Wildcard),
-            ExprType::ScalarFunction(expr) => {
-                let scalar_function = protobuf::ScalarFunction::from_i32(expr.fun)
-                    .ok_or_else(|| {
-                        proto_error(format!(
-                            "Received an unknown scalar function: {}",
-                            expr.fun
-                        ))
-                    })?;
-                match scalar_function {
-                    protobuf::ScalarFunction::Sqrt => {
-                        Ok(sqrt((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Sin => Ok(sin((&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Cos => Ok(cos((&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Tan => Ok(tan((&expr.expr[0]).try_into()?)),
-                    // protobuf::ScalarFunction::Asin => Ok(asin(&expr.expr[0]).try_into()?)),
-                    // protobuf::ScalarFunction::Acos => Ok(acos(&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Atan => {
-                        Ok(atan((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Exp => Ok(exp((&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Log2 => {
-                        Ok(log2((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Log10 => {
-                        Ok(log10((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Floor => {
-                        Ok(floor((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Ceil => {
-                        Ok(ceil((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Round => {
-                        Ok(round((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Trunc => {
-                        Ok(trunc((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Abs => Ok(abs((&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Signum => {
-                        Ok(signum((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Octetlength => {
-                        Ok(length((&expr.expr[0]).try_into()?))
-                    }
-                    // // protobuf::ScalarFunction::Concat => Ok(concat((&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Lower => {
-                        Ok(lower((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Upper => {
-                        Ok(upper((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Trim => {
-                        Ok(trim((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Ltrim => {
-                        Ok(ltrim((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Rtrim => {
-                        Ok(rtrim((&expr.expr[0]).try_into()?))
-                    }
-                    // protobuf::ScalarFunction::Totimestamp => Ok(to_timestamp((&expr.expr[0]).try_into()?)),
-                    // protobuf::ScalarFunction::Array => Ok(array((&expr.expr[0]).try_into()?)),
-                    // // protobuf::ScalarFunction::Nullif => Ok(nulli((&expr.expr[0]).try_into()?)),
-                    // protobuf::ScalarFunction::Datetrunc => Ok(date_trunc((&expr.expr[0]).try_into()?)),
-                    // protobuf::ScalarFunction::Md5 => Ok(md5((&expr.expr[0]).try_into()?)),
-                    protobuf::ScalarFunction::Sha224 => {
-                        Ok(sha224((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Sha256 => {
-                        Ok(sha256((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Sha384 => {
-                        Ok(sha384((&expr.expr[0]).try_into()?))
-                    }
-                    protobuf::ScalarFunction::Sha512 => {
-                        Ok(sha512((&expr.expr[0]).try_into()?))
-                    }
-                    _ => Err(proto_error(
-                        "Protobuf deserialization error: Unsupported scalar function",
-                    )),
-                }
-            }
-        }
-    }
-}
-
-fn from_proto_binary_op(op: &str) -> Result<Operator, BallistaError> {
-    match op {
-        "And" => Ok(Operator::And),
-        "Or" => Ok(Operator::Or),
-        "Eq" => Ok(Operator::Eq),
-        "NotEq" => Ok(Operator::NotEq),
-        "LtEq" => Ok(Operator::LtEq),
-        "Lt" => Ok(Operator::Lt),
-        "Gt" => Ok(Operator::Gt),
-        "GtEq" => Ok(Operator::GtEq),
-        "Plus" => Ok(Operator::Plus),
-        "Minus" => Ok(Operator::Minus),
-        "Multiply" => Ok(Operator::Multiply),
-        "Divide" => Ok(Operator::Divide),
-        "Like" => Ok(Operator::Like),
-        other => Err(proto_error(format!(
-            "Unsupported binary operator '{:?}'",
-            other
-        ))),
-    }
-}
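
The table above is the inverse of a name-based encoding (the serializer side presumably writes the operator's Debug name), and anything not listed is rejected. In short:

    // A known name round-trips; an unknown name surfaces a proto_error.
    fn demo() {
        assert!(matches!(from_proto_binary_op("Eq"), Ok(Operator::Eq)));
        assert!(from_proto_binary_op("Modulus").is_err()); // absent from the table above
    }
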
-
-impl TryInto<arrow::datatypes::DataType> for &protobuf::ScalarType {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::DataType, Self::Error> {
-        let pb_scalartype = self.datatype.as_ref().ok_or_else(|| {
-            proto_error("ScalarType message missing required field 'datatype'")
-        })?;
-        pb_scalartype.try_into()
-    }
-}
-
-impl TryInto<Schema> for &protobuf::Schema {
-    type Error = BallistaError;
-
-    fn try_into(self) -> Result<Schema, BallistaError> {
-        let fields = self
-            .columns
-            .iter()
-            .map(|c| {
-                let pb_arrow_type: &protobuf::ArrowType = c
-                    .arrow_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: Field message was missing required field 'arrow_type'"))?;
-                Ok(Field::new(&c.name, pb_arrow_type.try_into()?, c.nullable))
-            })
-            .collect::<Result<Vec<_>, _>>()?;
-        Ok(Schema::new(fields))
-    }
-}
-
-impl TryInto<arrow::datatypes::Field> for &protobuf::Field {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::Field, Self::Error> {
-        let pb_datatype = self.arrow_type.as_ref().ok_or_else(|| {
-            proto_error(
-                "Protobuf deserialization error: Field message missing required field 'arrow_type'",
-            )
-        })?;
-
-        Ok(arrow::datatypes::Field::new(
-            self.name.as_str(),
-            pb_datatype.as_ref().try_into()?,
-            self.nullable,
-        ))
-    }
-}
-
-use datafusion::physical_plan::datetime_expressions::{date_trunc, to_timestamp};
-use datafusion::prelude::{
-    array, length, lower, ltrim, md5, rtrim, sha224, sha256, sha384, sha512, trim, upper,
-};
-use std::convert::TryFrom;
-
-impl TryFrom<i32> for protobuf::FileType {
-    type Error = BallistaError;
-    fn try_from(value: i32) -> Result<Self, Self::Error> {
-        use protobuf::FileType;
-        match value {
-            x if x == FileType::NdJson as i32 => Ok(FileType::NdJson),
-            x if x == FileType::Parquet as i32 => Ok(FileType::Parquet),
-            x if x == FileType::Csv as i32 => Ok(FileType::Csv),
-            invalid => Err(BallistaError::General(format!(
-                "Attempted to convert invalid i32 to protobuf::FileType: {}",
-                invalid
-            ))),
-        }
-    }
-}
-
-impl Into<datafusion::sql::parser::FileType> for protobuf::FileType {
-    fn into(self) -> datafusion::sql::parser::FileType {
-        use datafusion::sql::parser::FileType;
-        match self {
-            protobuf::FileType::NdJson => FileType::NdJson,
-            protobuf::FileType::Parquet => FileType::Parquet,
-            protobuf::FileType::Csv => FileType::CSV,
-        }
-    }
-}
-
-fn parse_required_expr(
-    p: &Option<Box<protobuf::LogicalExprNode>>,
-) -> Result<Expr, BallistaError> {
-    match p {
-        Some(expr) => expr.as_ref().try_into(),
-        None => Err(proto_error("Missing required expression")),
-    }
-}
-
-fn parse_optional_expr(
-    p: &Option<Box<protobuf::LogicalExprNode>>,
-) -> Result<Option<Expr>, BallistaError> {
-    match p {
-        Some(expr) => expr.as_ref().try_into().map(Some),
-        None => Ok(None),
-    }
-}
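
The two helpers above differ only in how an absent child node is treated, which every expression arm leans on. Hypothetical call sites, for illustration only:

    // `parse_required_expr` turns a missing expression into an error;
    // `parse_optional_expr` maps it to Ok(None).
    fn demo(p: &Option<Box<protobuf::LogicalExprNode>>) {
        let _required: Result<Expr, BallistaError> = parse_required_expr(p);
        let _optional: Result<Option<Expr>, BallistaError> = parse_optional_expr(p);
    }
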
diff --git a/rust/ballista/rust/core/src/serde/logical_plan/mod.rs b/rust/ballista/rust/core/src/serde/logical_plan/mod.rs
deleted file mode 100644
index 48dd96c..0000000
--- a/rust/ballista/rust/core/src/serde/logical_plan/mod.rs
+++ /dev/null
@@ -1,929 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-pub mod from_proto;
-pub mod to_proto;
-
-#[cfg(test)]
-mod roundtrip_tests {
-
-    use super::super::{super::error::Result, protobuf};
-    use crate::error::BallistaError;
-    use arrow::datatypes::{DataType, Field, Schema};
-    use core::panic;
-    use datafusion::physical_plan::functions::BuiltinScalarFunction::Sqrt;
-    use datafusion::{
-        logical_plan::{Expr, LogicalPlan, LogicalPlanBuilder},
-        physical_plan::csv::CsvReadOptions,
-        prelude::*,
-        scalar::ScalarValue,
-    };
-    use protobuf::arrow_type;
-    use std::convert::TryInto;
-
-    // Given an instance of a LogicalPlan, converts it to protobuf and back, using Debug formatting to test equality.
-    macro_rules! roundtrip_test {
-        ($initial_struct:ident, $proto_type:ty, $struct_type:ty) => {
-            let proto: $proto_type = (&$initial_struct).try_into()?;
-
-            let round_trip: $struct_type = (&proto).try_into()?;
-
-            assert_eq!(
-                format!("{:?}", $initial_struct),
-                format!("{:?}", round_trip)
-            );
-        };
-        ($initial_struct:ident, $struct_type:ty) => {
-            roundtrip_test!($initial_struct, protobuf::LogicalPlanNode, $struct_type);
-        };
-        ($initial_struct:ident) => {
-            roundtrip_test!($initial_struct, protobuf::LogicalPlanNode, LogicalPlan);
-        };
-    }
-
-    #[test]
-    fn roundtrip_repartition() -> Result<()> {
-        use datafusion::logical_plan::Partitioning;
-
-        let test_batch_sizes = [usize::MIN, usize::MAX, 43256];
-
-        let test_expr: Vec<Expr> = vec![
-            Expr::Column("c1".to_string()) + Expr::Column("c2".to_string()),
-            Expr::Literal((4.0).into()),
-        ];
-
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Int32, false),
-            Field::new("first_name", DataType::Utf8, false),
-            Field::new("last_name", DataType::Utf8, false),
-            Field::new("state", DataType::Utf8, false),
-            Field::new("salary", DataType::Int32, false),
-        ]);
-
-        let plan = std::sync::Arc::new(
-            LogicalPlanBuilder::scan_csv(
-                "employee.csv",
-                CsvReadOptions::new().schema(&schema).has_header(true),
-                Some(vec![3, 4]),
-            )
-            .and_then(|plan| plan.sort(vec![col("salary")]))
-            .and_then(|plan| plan.build())
-            .map_err(BallistaError::DataFusionError)?,
-        );
-
-        for batch_size in test_batch_sizes.iter() {
-            let rr_repartition = Partitioning::RoundRobinBatch(*batch_size);
-
-            let roundtrip_plan = LogicalPlan::Repartition {
-                input: plan.clone(),
-                partitioning_scheme: rr_repartition,
-            };
-
-            roundtrip_test!(roundtrip_plan);
-
-            let h_repartition = Partitioning::Hash(test_expr.clone(), *batch_size);
-
-            let roundtrip_plan = LogicalPlan::Repartition {
-                input: plan.clone(),
-                partitioning_scheme: h_repartition,
-            };
-
-            roundtrip_test!(roundtrip_plan);
-
-            let no_expr_hrepartition = Partitioning::Hash(Vec::new(), *batch_size);
-
-            let roundtrip_plan = LogicalPlan::Repartition {
-                input: plan.clone(),
-                partitioning_scheme: no_expr_hrepartition,
-            };
-
-            roundtrip_test!(roundtrip_plan);
-        }
-
-        Ok(())
-    }
-
-    fn new_box_field(
-        name: &str,
-        dt: DataType,
-        nullable: bool,
-    ) -> Box<arrow::datatypes::Field> {
-        Box::new(arrow::datatypes::Field::new(name, dt, nullable))
-    }
-
-    #[test]
-    fn scalar_values_error_serialization() -> Result<()> {
-        use arrow::datatypes::DataType;
-        use datafusion::scalar::ScalarValue;
-        let should_fail_on_serialize: Vec<ScalarValue> = vec![
-            // Should fail due to inconsistent types
-            ScalarValue::List(
-                Some(vec![
-                    ScalarValue::Int16(None),
-                    ScalarValue::Float32(Some(32.0)),
-                ]),
-                DataType::List(new_box_field("item", DataType::Int16, true)),
-            ),
-            ScalarValue::List(
-                Some(vec![
-                    ScalarValue::Float32(None),
-                    ScalarValue::Float32(Some(32.0)),
-                ]),
-                DataType::List(new_box_field("item", DataType::Int16, true)),
-            ),
-            ScalarValue::List(
-                Some(vec![
-                    ScalarValue::List(
-                        None,
-                        DataType::List(new_box_field("level2", DataType::Float32, true)),
-                    ),
-                    ScalarValue::List(
-                        Some(vec![
-                            ScalarValue::Float32(Some(-213.1)),
-                            ScalarValue::Float32(None),
-                            ScalarValue::Float32(Some(5.5)),
-                            ScalarValue::Float32(Some(2.0)),
-                            ScalarValue::Float32(Some(1.0)),
-                        ]),
-                        DataType::List(new_box_field("level2", DataType::Float32, true)),
-                    ),
-                    ScalarValue::List(
-                        None,
-                        DataType::List(new_box_field(
-                            "lists are typed inconsistently",
-                            DataType::Int16,
-                            true,
-                        )),
-                    ),
-                ]),
-                DataType::List(new_box_field(
-                    "level1",
-                    DataType::List(new_box_field("level2", DataType::Float32, true)),
-                    true,
-                )),
-            ),
-        ];
-
-        for test_case in should_fail_on_serialize.into_iter() {
-            let res: Result<protobuf::ScalarValue> = (&test_case).try_into();
-            if let Ok(val) = res {
-                return Err(BallistaError::General(format!(
-                    "The value {:?} should not have been able to serialize. Serialized to :{:?}",
-                    test_case, val
-                )));
-            }
-        }
-        Ok(())
-    }
-
-    #[test]
-    fn round_trip_scalar_values() -> Result<()> {
-        use arrow::datatypes::DataType;
-        use datafusion::scalar::ScalarValue;
-        let should_pass: Vec<ScalarValue> = vec![
-            ScalarValue::Boolean(None),
-            ScalarValue::Float32(None),
-            ScalarValue::Float64(None),
-            ScalarValue::Int8(None),
-            ScalarValue::Int16(None),
-            ScalarValue::Int32(None),
-            ScalarValue::Int64(None),
-            ScalarValue::UInt8(None),
-            ScalarValue::UInt16(None),
-            ScalarValue::UInt32(None),
-            ScalarValue::UInt64(None),
-            ScalarValue::Utf8(None),
-            ScalarValue::LargeUtf8(None),
-            ScalarValue::List(None, DataType::Boolean),
-            ScalarValue::Date32(None),
-            ScalarValue::TimestampMicrosecond(None),
-            ScalarValue::TimestampNanosecond(None),
-            ScalarValue::Boolean(Some(true)),
-            ScalarValue::Boolean(Some(false)),
-            ScalarValue::Float32(Some(1.0)),
-            ScalarValue::Float32(Some(f32::MAX)),
-            ScalarValue::Float32(Some(f32::MIN)),
-            ScalarValue::Float32(Some(-2000.0)),
-            ScalarValue::Float64(Some(1.0)),
-            ScalarValue::Float64(Some(f64::MAX)),
-            ScalarValue::Float64(Some(f64::MIN)),
-            ScalarValue::Float64(Some(-2000.0)),
-            ScalarValue::Int8(Some(i8::MIN)),
-            ScalarValue::Int8(Some(i8::MAX)),
-            ScalarValue::Int8(Some(0)),
-            ScalarValue::Int8(Some(-15)),
-            ScalarValue::Int16(Some(i16::MIN)),
-            ScalarValue::Int16(Some(i16::MAX)),
-            ScalarValue::Int16(Some(0)),
-            ScalarValue::Int16(Some(-15)),
-            ScalarValue::Int32(Some(i32::MIN)),
-            ScalarValue::Int32(Some(i32::MAX)),
-            ScalarValue::Int32(Some(0)),
-            ScalarValue::Int32(Some(-15)),
-            ScalarValue::Int64(Some(i64::MIN)),
-            ScalarValue::Int64(Some(i64::MAX)),
-            ScalarValue::Int64(Some(0)),
-            ScalarValue::Int64(Some(-15)),
-            ScalarValue::UInt8(Some(u8::MAX)),
-            ScalarValue::UInt8(Some(0)),
-            ScalarValue::UInt16(Some(u16::MAX)),
-            ScalarValue::UInt16(Some(0)),
-            ScalarValue::UInt32(Some(u32::MAX)),
-            ScalarValue::UInt32(Some(0)),
-            ScalarValue::UInt64(Some(u64::MAX)),
-            ScalarValue::UInt64(Some(0)),
-            ScalarValue::Utf8(Some(String::from("Test string   "))),
-            ScalarValue::LargeUtf8(Some(String::from("Test Large utf8"))),
-            ScalarValue::Date32(Some(0)),
-            ScalarValue::Date32(Some(i32::MAX)),
-            ScalarValue::TimestampNanosecond(Some(0)),
-            ScalarValue::TimestampNanosecond(Some(i64::MAX)),
-            ScalarValue::TimestampMicrosecond(Some(0)),
-            ScalarValue::TimestampMicrosecond(Some(i64::MAX)),
-            ScalarValue::TimestampMicrosecond(None),
-            ScalarValue::List(
-                Some(vec![
-                    ScalarValue::Float32(Some(-213.1)),
-                    ScalarValue::Float32(None),
-                    ScalarValue::Float32(Some(5.5)),
-                    ScalarValue::Float32(Some(2.0)),
-                    ScalarValue::Float32(Some(1.0)),
-                ]),
-                DataType::List(new_box_field("level1", DataType::Float32, true)),
-            ),
-            ScalarValue::List(
-                Some(vec![
-                    ScalarValue::List(
-                        None,
-                        DataType::List(new_box_field("level2", DataType::Float32, true)),
-                    ),
-                    ScalarValue::List(
-                        Some(vec![
-                            ScalarValue::Float32(Some(-213.1)),
-                            ScalarValue::Float32(None),
-                            ScalarValue::Float32(Some(5.5)),
-                            ScalarValue::Float32(Some(2.0)),
-                            ScalarValue::Float32(Some(1.0)),
-                        ]),
-                        DataType::List(new_box_field("level2", DataType::Float32, true)),
-                    ),
-                ]),
-                DataType::List(new_box_field(
-                    "level1",
-                    DataType::List(new_box_field("level2", DataType::Float32, true)),
-                    true,
-                )),
-            ),
-        ];
-
-        for test_case in should_pass.into_iter() {
-            let proto: protobuf::ScalarValue = (&test_case).try_into()?;
-            let _roundtrip: ScalarValue = (&proto).try_into()?;
-        }
-
-        Ok(())
-    }
-
-    #[test]
-    fn round_trip_scalar_types() -> Result<()> {
-        use arrow::datatypes::DataType;
-        use arrow::datatypes::{IntervalUnit, TimeUnit};
-        let should_pass: Vec<DataType> = vec![
-            DataType::Boolean,
-            DataType::Int8,
-            DataType::Int16,
-            DataType::Int32,
-            DataType::Int64,
-            DataType::UInt8,
-            DataType::UInt16,
-            DataType::UInt32,
-            DataType::UInt64,
-            DataType::Float32,
-            DataType::Float64,
-            DataType::Date32,
-            DataType::Time64(TimeUnit::Microsecond),
-            DataType::Time64(TimeUnit::Nanosecond),
-            DataType::Utf8,
-            DataType::LargeUtf8,
-            // Recursive list tests
-            DataType::List(new_box_field("Level1", DataType::Boolean, true)),
-            DataType::List(new_box_field(
-                "Level1",
-                DataType::List(new_box_field("Level2", DataType::Date32, true)),
-                true,
-            )),
-        ];
-
-        let should_fail: Vec<DataType> = vec![
-            DataType::Null,
-            DataType::Float16,
-            // Add more timestamp tests
-            DataType::Timestamp(TimeUnit::Millisecond, None),
-            DataType::Date64,
-            DataType::Time32(TimeUnit::Second),
-            DataType::Time32(TimeUnit::Millisecond),
-            DataType::Time32(TimeUnit::Microsecond),
-            DataType::Time32(TimeUnit::Nanosecond),
-            DataType::Time64(TimeUnit::Second),
-            DataType::Time64(TimeUnit::Millisecond),
-            DataType::Duration(TimeUnit::Second),
-            DataType::Duration(TimeUnit::Millisecond),
-            DataType::Duration(TimeUnit::Microsecond),
-            DataType::Duration(TimeUnit::Nanosecond),
-            DataType::Interval(IntervalUnit::YearMonth),
-            DataType::Interval(IntervalUnit::DayTime),
-            DataType::Binary,
-            DataType::FixedSizeBinary(0),
-            DataType::FixedSizeBinary(1234),
-            DataType::FixedSizeBinary(-432),
-            DataType::LargeBinary,
-            DataType::Decimal(1345, 5431),
-            // Recursive list tests
-            DataType::List(new_box_field("Level1", DataType::Binary, true)),
-            DataType::List(new_box_field(
-                "Level1",
-                DataType::List(new_box_field(
-                    "Level2",
-                    DataType::FixedSizeBinary(53),
-                    false,
-                )),
-                true,
-            )),
-            // Fixed size lists
-            DataType::FixedSizeList(new_box_field("Level1", DataType::Binary, true), 4),
-            DataType::FixedSizeList(
-                new_box_field(
-                    "Level1",
-                    DataType::List(new_box_field(
-                        "Level2",
-                        DataType::FixedSizeBinary(53),
-                        false,
-                    )),
-                    true,
-                ),
-                41,
-            ),
-            // Struct testing
-            DataType::Struct(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-            ]),
-            DataType::Struct(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-                Field::new(
-                    "nested_struct",
-                    DataType::Struct(vec![
-                        Field::new("nullable", DataType::Boolean, false),
-                        Field::new("name", DataType::Utf8, false),
-                        Field::new("datatype", DataType::Binary, false),
-                    ]),
-                    true,
-                ),
-            ]),
-            DataType::Union(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-            ]),
-            DataType::Union(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-                Field::new(
-                    "nested_struct",
-                    DataType::Struct(vec![
-                        Field::new("nullable", DataType::Boolean, false),
-                        Field::new("name", DataType::Utf8, false),
-                        Field::new("datatype", DataType::Binary, false),
-                    ]),
-                    true,
-                ),
-            ]),
-            DataType::Dictionary(
-                Box::new(DataType::Utf8),
-                Box::new(DataType::Struct(vec![
-                    Field::new("nullable", DataType::Boolean, false),
-                    Field::new("name", DataType::Utf8, false),
-                    Field::new("datatype", DataType::Binary, false),
-                ])),
-            ),
-            DataType::Dictionary(
-                Box::new(DataType::Decimal(10, 50)),
-                Box::new(DataType::FixedSizeList(
-                    new_box_field("Level1", DataType::Binary, true),
-                    4,
-                )),
-            ),
-        ];
-
-        for test_case in should_pass.into_iter() {
-            let proto: protobuf::ScalarType = (&test_case).try_into()?;
-            let roundtrip: DataType = (&proto).try_into()?;
-            assert_eq!(format!("{:?}", test_case), format!("{:?}", roundtrip));
-        }
-
-        let mut success: Vec<DataType> = Vec::new();
-        for test_case in should_fail.into_iter() {
-            let proto: Result<protobuf::ScalarType> = (&test_case).try_into();
-            if proto.is_ok() {
-                success.push(test_case)
-            }
-        }
-        if !success.is_empty() {
-            return Err(BallistaError::General(format!(
-                "The following items which should have ressulted in an error completed successfully: {:?}",
-                success
-            )));
-        }
-        Ok(())
-    }
-
-    #[test]
-    fn round_trip_datatype() -> Result<()> {
-        use arrow::datatypes::DataType;
-        use arrow::datatypes::{IntervalUnit, TimeUnit};
-        let test_cases: Vec<DataType> = vec![
-            DataType::Null,
-            DataType::Boolean,
-            DataType::Int8,
-            DataType::Int16,
-            DataType::Int32,
-            DataType::Int64,
-            DataType::UInt8,
-            DataType::UInt16,
-            DataType::UInt32,
-            DataType::UInt64,
-            DataType::Float16,
-            DataType::Float32,
-            DataType::Float64,
-            // Add more timestamp tests
-            DataType::Timestamp(TimeUnit::Millisecond, None),
-            DataType::Date32,
-            DataType::Date64,
-            DataType::Time32(TimeUnit::Second),
-            DataType::Time32(TimeUnit::Millisecond),
-            DataType::Time32(TimeUnit::Microsecond),
-            DataType::Time32(TimeUnit::Nanosecond),
-            DataType::Time64(TimeUnit::Second),
-            DataType::Time64(TimeUnit::Millisecond),
-            DataType::Time64(TimeUnit::Microsecond),
-            DataType::Time64(TimeUnit::Nanosecond),
-            DataType::Duration(TimeUnit::Second),
-            DataType::Duration(TimeUnit::Millisecond),
-            DataType::Duration(TimeUnit::Microsecond),
-            DataType::Duration(TimeUnit::Nanosecond),
-            DataType::Interval(IntervalUnit::YearMonth),
-            DataType::Interval(IntervalUnit::DayTime),
-            DataType::Binary,
-            DataType::FixedSizeBinary(0),
-            DataType::FixedSizeBinary(1234),
-            DataType::FixedSizeBinary(-432),
-            DataType::LargeBinary,
-            DataType::Utf8,
-            DataType::LargeUtf8,
-            DataType::Decimal(1345, 5431),
-            // Recursive list tests
-            DataType::List(new_box_field("Level1", DataType::Binary, true)),
-            DataType::List(new_box_field(
-                "Level1",
-                DataType::List(new_box_field(
-                    "Level2",
-                    DataType::FixedSizeBinary(53),
-                    false,
-                )),
-                true,
-            )),
-            // Fixed size lists
-            DataType::FixedSizeList(new_box_field("Level1", DataType::Binary, true), 4),
-            DataType::FixedSizeList(
-                new_box_field(
-                    "Level1",
-                    DataType::List(new_box_field(
-                        "Level2",
-                        DataType::FixedSizeBinary(53),
-                        false,
-                    )),
-                    true,
-                ),
-                41,
-            ),
-            // Struct testing
-            DataType::Struct(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-            ]),
-            DataType::Struct(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-                Field::new(
-                    "nested_struct",
-                    DataType::Struct(vec![
-                        Field::new("nullable", DataType::Boolean, false),
-                        Field::new("name", DataType::Utf8, false),
-                        Field::new("datatype", DataType::Binary, false),
-                    ]),
-                    true,
-                ),
-            ]),
-            DataType::Union(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-            ]),
-            DataType::Union(vec![
-                Field::new("nullable", DataType::Boolean, false),
-                Field::new("name", DataType::Utf8, false),
-                Field::new("datatype", DataType::Binary, false),
-                Field::new(
-                    "nested_struct",
-                    DataType::Struct(vec![
-                        Field::new("nullable", DataType::Boolean, false),
-                        Field::new("name", DataType::Utf8, false),
-                        Field::new("datatype", DataType::Binary, false),
-                    ]),
-                    true,
-                ),
-            ]),
-            DataType::Dictionary(
-                Box::new(DataType::Utf8),
-                Box::new(DataType::Struct(vec![
-                    Field::new("nullable", DataType::Boolean, false),
-                    Field::new("name", DataType::Utf8, false),
-                    Field::new("datatype", DataType::Binary, false),
-                ])),
-            ),
-            DataType::Dictionary(
-                Box::new(DataType::Decimal(10, 50)),
-                Box::new(DataType::FixedSizeList(
-                    new_box_field("Level1", DataType::Binary, true),
-                    4,
-                )),
-            ),
-        ];
-
-        for test_case in test_cases.into_iter() {
-            let proto: protobuf::ArrowType = (&test_case).into();
-            let roundtrip: DataType = (&proto).try_into()?;
-            assert_eq!(format!("{:?}", test_case), format!("{:?}", roundtrip));
-        }
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_null_scalar_values() -> Result<()> {
-        use arrow::datatypes::DataType;
-        use arrow::datatypes::Field;
-        use datafusion::scalar::ScalarValue;
-        let test_types = vec![
-            ScalarValue::Boolean(None),
-            ScalarValue::Float32(None),
-            ScalarValue::Float64(None),
-            ScalarValue::Int8(None),
-            ScalarValue::Int16(None),
-            ScalarValue::Int32(None),
-            ScalarValue::Int64(None),
-            ScalarValue::UInt8(None),
-            ScalarValue::UInt16(None),
-            ScalarValue::UInt32(None),
-            ScalarValue::UInt64(None),
-            ScalarValue::Utf8(None),
-            ScalarValue::LargeUtf8(None),
-            ScalarValue::Date32(None),
-            ScalarValue::TimestampMicrosecond(None),
-            ScalarValue::TimestampNanosecond(None),
-            // ScalarValue::List(None, DataType::Boolean)
-        ];
-
-        for test_case in test_types.into_iter() {
-            let proto_scalar: protobuf::ScalarValue = (&test_case).try_into()?;
-            let returned_scalar: datafusion::scalar::ScalarValue =
-                (&proto_scalar).try_into()?;
-            assert_eq!(
-                format!("{:?}", &test_case),
-                format!("{:?}", returned_scalar)
-            );
-        }
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_create_external_table() -> Result<()> {
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Int32, false),
-            Field::new("first_name", DataType::Utf8, false),
-            Field::new("last_name", DataType::Utf8, false),
-            Field::new("state", DataType::Utf8, false),
-            Field::new("salary", DataType::Int32, false),
-        ]);
-
-        use datafusion::logical_plan::ToDFSchema;
-
-        let df_schema_ref = schema.to_dfschema_ref()?;
-
-        use datafusion::sql::parser::FileType;
-
-        let filetypes: [FileType; 3] =
-            [FileType::NdJson, FileType::Parquet, FileType::CSV];
-
-        for file in filetypes.iter() {
-            let create_table_node = LogicalPlan::CreateExternalTable {
-                schema: df_schema_ref.clone(),
-                name: String::from("TestName"),
-                location: String::from("employee.csv"),
-                file_type: *file,
-                has_header: true,
-            };
-
-            roundtrip_test!(create_table_node);
-        }
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_explain() -> Result<()> {
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Int32, false),
-            Field::new("first_name", DataType::Utf8, false),
-            Field::new("last_name", DataType::Utf8, false),
-            Field::new("state", DataType::Utf8, false),
-            Field::new("salary", DataType::Int32, false),
-        ]);
-
-        let verbose_plan = LogicalPlanBuilder::scan_csv(
-            "employee.csv",
-            CsvReadOptions::new().schema(&schema).has_header(true),
-            Some(vec![3, 4]),
-        )
-        .and_then(|plan| plan.sort(vec![col("salary")]))
-        .and_then(|plan| plan.explain(true))
-        .and_then(|plan| plan.build())
-        .map_err(BallistaError::DataFusionError)?;
-
-        let plan = LogicalPlanBuilder::scan_csv(
-            "employee.csv",
-            CsvReadOptions::new().schema(&schema).has_header(true),
-            Some(vec![3, 4]),
-        )
-        .and_then(|plan| plan.sort(vec![col("salary")]))
-        .and_then(|plan| plan.explain(false))
-        .and_then(|plan| plan.build())
-        .map_err(BallistaError::DataFusionError)?;
-
-        roundtrip_test!(plan);
-
-        roundtrip_test!(verbose_plan);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_join() -> Result<()> {
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Int32, false),
-            Field::new("first_name", DataType::Utf8, false),
-            Field::new("last_name", DataType::Utf8, false),
-            Field::new("state", DataType::Utf8, false),
-            Field::new("salary", DataType::Int32, false),
-        ]);
-
-        let scan_plan = LogicalPlanBuilder::empty(false)
-            .build()
-            .map_err(BallistaError::DataFusionError)?;
-        let plan = LogicalPlanBuilder::scan_csv(
-            "employee.csv",
-            CsvReadOptions::new().schema(&schema).has_header(true),
-            Some(vec![3, 4]),
-        )
-        .and_then(|plan| plan.join(&scan_plan, JoinType::Inner, &["id"], &["id"]))
-        .and_then(|plan| plan.build())
-        .map_err(BallistaError::DataFusionError)?;
-
-        roundtrip_test!(plan);
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_sort() -> Result<()> {
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Int32, false),
-            Field::new("first_name", DataType::Utf8, false),
-            Field::new("last_name", DataType::Utf8, false),
-            Field::new("state", DataType::Utf8, false),
-            Field::new("salary", DataType::Int32, false),
-        ]);
-
-        let plan = LogicalPlanBuilder::scan_csv(
-            "employee.csv",
-            CsvReadOptions::new().schema(&schema).has_header(true),
-            Some(vec![3, 4]),
-        )
-        .and_then(|plan| plan.sort(vec![col("salary")]))
-        .and_then(|plan| plan.build())
-        .map_err(BallistaError::DataFusionError)?;
-        roundtrip_test!(plan);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_empty_relation() -> Result<()> {
-        let plan_false = LogicalPlanBuilder::empty(false)
-            .build()
-            .map_err(BallistaError::DataFusionError)?;
-
-        roundtrip_test!(plan_false);
-
-        let plan_true = LogicalPlanBuilder::empty(true)
-            .build()
-            .map_err(BallistaError::DataFusionError)?;
-
-        roundtrip_test!(plan_true);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_logical_plan() -> Result<()> {
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Int32, false),
-            Field::new("first_name", DataType::Utf8, false),
-            Field::new("last_name", DataType::Utf8, false),
-            Field::new("state", DataType::Utf8, false),
-            Field::new("salary", DataType::Int32, false),
-        ]);
-
-        let plan = LogicalPlanBuilder::scan_csv(
-            "employee.csv",
-            CsvReadOptions::new().schema(&schema).has_header(true),
-            Some(vec![3, 4]),
-        )
-        .and_then(|plan| plan.aggregate(vec![col("state")], vec![max(col("salary"))]))
-        .and_then(|plan| plan.build())
-        .map_err(BallistaError::DataFusionError)?;
-
-        roundtrip_test!(plan);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_not() -> Result<()> {
-        let test_expr = Expr::Not(Box::new(Expr::Literal((1.0).into())));
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_is_null() -> Result<()> {
-        let test_expr = Expr::IsNull(Box::new(Expr::Column("id".into())));
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_is_not_null() -> Result<()> {
-        let test_expr = Expr::IsNotNull(Box::new(Expr::Column("id".into())));
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_between() -> Result<()> {
-        let test_expr = Expr::Between {
-            expr: Box::new(Expr::Literal((1.0).into())),
-            negated: true,
-            low: Box::new(Expr::Literal((2.0).into())),
-            high: Box::new(Expr::Literal((3.0).into())),
-        };
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_case() -> Result<()> {
-        let test_expr = Expr::Case {
-            expr: Some(Box::new(Expr::Literal((1.0).into()))),
-            when_then_expr: vec![(
-                Box::new(Expr::Literal((2.0).into())),
-                Box::new(Expr::Literal((3.0).into())),
-            )],
-            else_expr: Some(Box::new(Expr::Literal((4.0).into()))),
-        };
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_cast() -> Result<()> {
-        let test_expr = Expr::Cast {
-            expr: Box::new(Expr::Literal((1.0).into())),
-            data_type: DataType::Boolean,
-        };
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_sort_expr() -> Result<()> {
-        let test_expr = Expr::Sort {
-            expr: Box::new(Expr::Literal((1.0).into())),
-            asc: true,
-            nulls_first: true,
-        };
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_negative() -> Result<()> {
-        let test_expr = Expr::Negative(Box::new(Expr::Literal((1.0).into())));
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_inlist() -> Result<()> {
-        let test_expr = Expr::InList {
-            expr: Box::new(Expr::Literal((1.0).into())),
-            list: vec![Expr::Literal((2.0).into())],
-            negated: true,
-        };
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_wildcard() -> Result<()> {
-        let test_expr = Expr::Wildcard;
-
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_sqrt() -> Result<()> {
-        let test_expr = Expr::ScalarFunction {
-            fun: Sqrt,
-            args: vec![col("col")],
-        };
-        roundtrip_test!(test_expr, protobuf::LogicalExprNode, Expr);
-
-        Ok(())
-    }
-}
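
The roundtrip tests above all funnel through the module's roundtrip_test! macro. A
minimal sketch of the equivalent check, assuming the crate's BallistaError-based
Result alias and TryInto conversions in both directions (the helper name here is
hypothetical):

    use std::convert::TryInto;

    fn assert_roundtrip<T, P>(value: T) -> Result<()>
    where
        for<'a> &'a T: TryInto<P, Error = BallistaError>,
        for<'a> &'a P: TryInto<T, Error = BallistaError>,
        T: std::fmt::Debug,
    {
        // Convert into the protobuf representation and back again.
        let proto: P = (&value).try_into()?;
        let back: T = (&proto).try_into()?;
        // The tests compare Debug output rather than requiring PartialEq.
        assert_eq!(format!("{:?}", value), format!("{:?}", back));
        Ok(())
    }
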
diff --git a/rust/ballista/rust/core/src/serde/logical_plan/to_proto.rs b/rust/ballista/rust/core/src/serde/logical_plan/to_proto.rs
deleted file mode 100644
index a181f98..0000000
--- a/rust/ballista/rust/core/src/serde/logical_plan/to_proto.rs
+++ /dev/null
@@ -1,1233 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Serde code to convert Arrow schemas and DataFusion logical plans to Ballista protocol
-//! buffer format, allowing DataFusion logical plans to be serialized and transmitted between
-//! processes.
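-//!
-//! A minimal sketch of the intended round trip, assuming the conversions defined
-//! in this module and its `from_proto` counterpart (`plan` is a hypothetical
-//! `LogicalPlan` value):
-//!
-//! ```ignore
-//! use prost::Message;
-//! use std::convert::TryInto;
-//!
-//! let proto: protobuf::LogicalPlanNode = (&plan).try_into()?;
-//! let mut buf = Vec::new();
-//! proto.encode(&mut buf)?; // serialized bytes can now cross process boundaries
-//! ```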
-
-use std::{
-    boxed,
-    convert::{TryFrom, TryInto},
-};
-
-use crate::datasource::DFTableAdapter;
-use crate::serde::{protobuf, BallistaError};
-
-use arrow::datatypes::{DataType, Schema};
-use datafusion::datasource::CsvFile;
-use datafusion::logical_plan::{Expr, JoinType, LogicalPlan};
-use datafusion::physical_plan::aggregates::AggregateFunction;
-use datafusion::{datasource::parquet::ParquetTable, logical_plan::exprlist_to_fields};
-use protobuf::{
-    arrow_type, logical_expr_node::ExprType, scalar_type, DateUnit, Field,
-    PrimitiveScalarType, ScalarListValue, ScalarType,
-};
-
-use super::super::proto_error;
-use datafusion::physical_plan::functions::BuiltinScalarFunction;
-
-impl protobuf::IntervalUnit {
-    pub fn from_arrow_interval_unit(
-        interval_unit: &arrow::datatypes::IntervalUnit,
-    ) -> Self {
-        match interval_unit {
-            arrow::datatypes::IntervalUnit::YearMonth => {
-                protobuf::IntervalUnit::YearMonth
-            }
-            arrow::datatypes::IntervalUnit::DayTime => protobuf::IntervalUnit::DayTime,
-        }
-    }
-
-    pub fn from_i32_to_arrow(
-        interval_unit_i32: i32,
-    ) -> Result<arrow::datatypes::IntervalUnit, BallistaError> {
-        let pb_interval_unit = protobuf::IntervalUnit::from_i32(interval_unit_i32);
-        use arrow::datatypes::IntervalUnit;
-        match pb_interval_unit {
-            Some(interval_unit) => Ok(match interval_unit {
-                protobuf::IntervalUnit::YearMonth => IntervalUnit::YearMonth,
-                protobuf::IntervalUnit::DayTime => IntervalUnit::DayTime,
-            }),
-            None => Err(proto_error(
-                "Error converting i32 to DateUnit: Passed invalid variant",
-            )),
-        }
-    }
-}
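-
-// Prost represents protobuf enum fields as raw i32 values, hence the
-// `from_i32_to_arrow` helpers here and below: `from_i32` yields `None` for an
-// out-of-range value (hypothetical example: a corrupted field encoded as 99),
-// which the helpers surface as a `proto_error` instead of misdecoding.
-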
-/* Arrow changed dates to no longer carry a date unit, so this impl is kept only for reference:
-
-impl protobuf::DateUnit {
-    pub fn from_arrow_date_unit(val: &arrow::datatypes::DateUnit) -> Self {
-        match val {
-            arrow::datatypes::DateUnit::Day => protobuf::DateUnit::Day,
-            arrow::datatypes::DateUnit::Millisecond => protobuf::DateUnit::DateMillisecond,
-        }
-    }
-    pub fn from_i32_to_arrow(date_unit_i32: i32) -> Result<arrow::datatypes::DateUnit, BallistaError> {
-        let pb_date_unit = protobuf::DateUnit::from_i32(date_unit_i32);
-        use arrow::datatypes::DateUnit;
-        match pb_date_unit {
-            Some(date_unit) => Ok(match date_unit {
-                protobuf::DateUnit::Day => DateUnit::Day,
-                protobuf::DateUnit::DateMillisecond => DateUnit::Millisecond,
-            }),
-            None => Err(proto_error("Error converting i32 to DateUnit: Passed invalid variant")),
-        }
-    }
-
-}*/
-
-impl protobuf::TimeUnit {
-    pub fn from_arrow_time_unit(val: &arrow::datatypes::TimeUnit) -> Self {
-        match val {
-            arrow::datatypes::TimeUnit::Second => protobuf::TimeUnit::Second,
-            arrow::datatypes::TimeUnit::Millisecond => {
-                protobuf::TimeUnit::TimeMillisecond
-            }
-            arrow::datatypes::TimeUnit::Microsecond => protobuf::TimeUnit::Microsecond,
-            arrow::datatypes::TimeUnit::Nanosecond => protobuf::TimeUnit::Nanosecond,
-        }
-    }
-    pub fn from_i32_to_arrow(
-        time_unit_i32: i32,
-    ) -> Result<arrow::datatypes::TimeUnit, BallistaError> {
-        let pb_time_unit = protobuf::TimeUnit::from_i32(time_unit_i32);
-        use arrow::datatypes::TimeUnit;
-        match pb_time_unit {
-            Some(time_unit) => Ok(match time_unit {
-                protobuf::TimeUnit::Second => TimeUnit::Second,
-                protobuf::TimeUnit::TimeMillisecond => TimeUnit::Millisecond,
-                protobuf::TimeUnit::Microsecond => TimeUnit::Microsecond,
-                protobuf::TimeUnit::Nanosecond => TimeUnit::Nanosecond,
-            }),
-            None => Err(proto_error(
-                "Error converting i32 to TimeUnit: Passed invalid variant",
-            )),
-        }
-    }
-}
-
-impl From<&arrow::datatypes::Field> for protobuf::Field {
-    fn from(field: &arrow::datatypes::Field) -> Self {
-        protobuf::Field {
-            name: field.name().to_owned(),
-            arrow_type: Some(Box::new(field.data_type().into())),
-            nullable: field.is_nullable(),
-            children: Vec::new(),
-        }
-    }
-}
-
-impl From<&arrow::datatypes::DataType> for protobuf::ArrowType {
-    fn from(val: &arrow::datatypes::DataType) -> protobuf::ArrowType {
-        protobuf::ArrowType {
-            arrow_type_enum: Some(val.into()),
-        }
-    }
-}
-
-impl TryInto<arrow::datatypes::DataType> for &protobuf::ArrowType {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::DataType, Self::Error> {
-        let pb_arrow_type = self.arrow_type_enum.as_ref().ok_or_else(|| {
-            proto_error(
-                "Protobuf deserialization error: ArrowType missing required field 'data_type'",
-            )
-        })?;
-        use arrow::datatypes::DataType;
-        Ok(match pb_arrow_type {
-            protobuf::arrow_type::ArrowTypeEnum::None(_) => DataType::Null,
-            protobuf::arrow_type::ArrowTypeEnum::Bool(_) => DataType::Boolean,
-            protobuf::arrow_type::ArrowTypeEnum::Uint8(_) => DataType::UInt8,
-            protobuf::arrow_type::ArrowTypeEnum::Int8(_) => DataType::Int8,
-            protobuf::arrow_type::ArrowTypeEnum::Uint16(_) => DataType::UInt16,
-            protobuf::arrow_type::ArrowTypeEnum::Int16(_) => DataType::Int16,
-            protobuf::arrow_type::ArrowTypeEnum::Uint32(_) => DataType::UInt32,
-            protobuf::arrow_type::ArrowTypeEnum::Int32(_) => DataType::Int32,
-            protobuf::arrow_type::ArrowTypeEnum::Uint64(_) => DataType::UInt64,
-            protobuf::arrow_type::ArrowTypeEnum::Int64(_) => DataType::Int64,
-            protobuf::arrow_type::ArrowTypeEnum::Float16(_) => DataType::Float16,
-            protobuf::arrow_type::ArrowTypeEnum::Float32(_) => DataType::Float32,
-            protobuf::arrow_type::ArrowTypeEnum::Float64(_) => DataType::Float64,
-            protobuf::arrow_type::ArrowTypeEnum::Utf8(_) => DataType::Utf8,
-            protobuf::arrow_type::ArrowTypeEnum::LargeUtf8(_) => DataType::LargeUtf8,
-            protobuf::arrow_type::ArrowTypeEnum::Binary(_) => DataType::Binary,
-            protobuf::arrow_type::ArrowTypeEnum::FixedSizeBinary(size) => {
-                DataType::FixedSizeBinary(*size)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::LargeBinary(_) => DataType::LargeBinary,
-            protobuf::arrow_type::ArrowTypeEnum::Date32(_) => DataType::Date32,
-            protobuf::arrow_type::ArrowTypeEnum::Date64(_) => DataType::Date64,
-            protobuf::arrow_type::ArrowTypeEnum::Duration(time_unit_i32) => {
-                DataType::Duration(protobuf::TimeUnit::from_i32_to_arrow(*time_unit_i32)?)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Timestamp(timestamp) => {
-                DataType::Timestamp(
-                    protobuf::TimeUnit::from_i32_to_arrow(timestamp.time_unit)?,
-                    match timestamp.timezone.is_empty() {
-                        true => None,
-                        false => Some(timestamp.timezone.to_owned()),
-                    },
-                )
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Time32(time_unit_i32) => {
-                DataType::Time32(protobuf::TimeUnit::from_i32_to_arrow(*time_unit_i32)?)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Time64(time_unit_i32) => {
-                DataType::Time64(protobuf::TimeUnit::from_i32_to_arrow(*time_unit_i32)?)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Interval(interval_unit_i32) => {
-                DataType::Interval(protobuf::IntervalUnit::from_i32_to_arrow(
-                    *interval_unit_i32,
-                )?)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Decimal(protobuf::Decimal {
-                whole,
-                fractional,
-            }) => DataType::Decimal(*whole as usize, *fractional as usize),
-            protobuf::arrow_type::ArrowTypeEnum::List(boxed_list) => {
-                let field_ref = boxed_list
-                    .field_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: List message was missing required field 'field_type'"))?
-                    .as_ref();
-                arrow::datatypes::DataType::List(Box::new(field_ref.try_into()?))
-            }
-            protobuf::arrow_type::ArrowTypeEnum::LargeList(boxed_list) => {
-                let field_ref = boxed_list
-                    .field_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: LargeList message was missing required field 'field_type'"))?
-                    .as_ref();
-                arrow::datatypes::DataType::LargeList(Box::new(field_ref.try_into()?))
-            }
-            protobuf::arrow_type::ArrowTypeEnum::FixedSizeList(boxed_list) => {
-                let fsl_ref = boxed_list.as_ref();
-                let pb_fieldtype = fsl_ref
-                    .field_type
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: FixedSizeList message was missing required field 'field_type'"))?;
-                arrow::datatypes::DataType::FixedSizeList(
-                    Box::new(pb_fieldtype.as_ref().try_into()?),
-                    fsl_ref.list_size,
-                )
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Struct(struct_type) => {
-                let fields = struct_type
-                    .sub_field_types
-                    .iter()
-                    .map(|field| field.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-                arrow::datatypes::DataType::Struct(fields)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Union(union) => {
-                let union_types = union
-                    .union_types
-                    .iter()
-                    .map(|field| field.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-                arrow::datatypes::DataType::Union(union_types)
-            }
-            protobuf::arrow_type::ArrowTypeEnum::Dictionary(boxed_dict) => {
-                let dict_ref = boxed_dict.as_ref();
-                let pb_key = dict_ref
-                    .key
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: Dictionary message was missing required field 'key'"))?;
-                let pb_value = dict_ref
-                    .value
-                    .as_ref()
-                    .ok_or_else(|| proto_error("Protobuf deserialization error: Dictionary message was missing required field 'value'"))?;
-                arrow::datatypes::DataType::Dictionary(
-                    Box::new(pb_key.as_ref().try_into()?),
-                    Box::new(pb_value.as_ref().try_into()?),
-                )
-            }
-        })
-    }
-}
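-// Every Arrow DataType maps into `ArrowType` infallibly (see the `From<&DataType>`
-// impl below) and back out through this fallible impl, which is why the
-// `round_trip_datatype` test can exercise every variant; the narrower `ScalarType`
-// conversions further down reject types that DataFusion scalars cannot represent.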
-
-impl TryInto<arrow::datatypes::DataType> for &Box<protobuf::List> {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<arrow::datatypes::DataType, Self::Error> {
-        let list_ref = self.as_ref();
-        match &list_ref.field_type {
-            Some(pb_field) => {
-                let pb_field_ref = pb_field.as_ref();
-                let arrow_field: arrow::datatypes::Field = pb_field_ref.try_into()?;
-                Ok(arrow::datatypes::DataType::List(Box::new(arrow_field)))
-            }
-            None => Err(proto_error(
-                "List message missing required field 'field_type'",
-            )),
-        }
-    }
-}
-
-impl From<&arrow::datatypes::DataType> for protobuf::arrow_type::ArrowTypeEnum {
-    fn from(val: &arrow::datatypes::DataType) -> protobuf::arrow_type::ArrowTypeEnum {
-        use protobuf::arrow_type::ArrowTypeEnum;
-        use protobuf::ArrowType;
-        use protobuf::EmptyMessage;
-        match val {
-            DataType::Null => ArrowTypeEnum::None(EmptyMessage {}),
-            DataType::Boolean => ArrowTypeEnum::Bool(EmptyMessage {}),
-            DataType::Int8 => ArrowTypeEnum::Int8(EmptyMessage {}),
-            DataType::Int16 => ArrowTypeEnum::Int16(EmptyMessage {}),
-            DataType::Int32 => ArrowTypeEnum::Int32(EmptyMessage {}),
-            DataType::Int64 => ArrowTypeEnum::Int64(EmptyMessage {}),
-            DataType::UInt8 => ArrowTypeEnum::Uint8(EmptyMessage {}),
-            DataType::UInt16 => ArrowTypeEnum::Uint16(EmptyMessage {}),
-            DataType::UInt32 => ArrowTypeEnum::Uint32(EmptyMessage {}),
-            DataType::UInt64 => ArrowTypeEnum::Uint64(EmptyMessage {}),
-            DataType::Float16 => ArrowTypeEnum::Float16(EmptyMessage {}),
-            DataType::Float32 => ArrowTypeEnum::Float32(EmptyMessage {}),
-            DataType::Float64 => ArrowTypeEnum::Float64(EmptyMessage {}),
-            DataType::Timestamp(time_unit, timezone) => {
-                ArrowTypeEnum::Timestamp(protobuf::Timestamp {
-                    time_unit: protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32,
-                    timezone: timezone.to_owned().unwrap_or_else(String::new),
-                })
-            }
-            DataType::Date32 => ArrowTypeEnum::Date32(EmptyMessage {}),
-            DataType::Date64 => ArrowTypeEnum::Date64(EmptyMessage {}),
-            DataType::Time32(time_unit) => ArrowTypeEnum::Time32(
-                protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32,
-            ),
-            DataType::Time64(time_unit) => ArrowTypeEnum::Time64(
-                protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32,
-            ),
-            DataType::Duration(time_unit) => ArrowTypeEnum::Duration(
-                protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32,
-            ),
-            DataType::Interval(interval_unit) => ArrowTypeEnum::Interval(
-                protobuf::IntervalUnit::from_arrow_interval_unit(interval_unit) as i32,
-            ),
-            DataType::Binary => ArrowTypeEnum::Binary(EmptyMessage {}),
-            DataType::FixedSizeBinary(size) => ArrowTypeEnum::FixedSizeBinary(*size),
-            DataType::LargeBinary => ArrowTypeEnum::LargeBinary(EmptyMessage {}),
-            DataType::Utf8 => ArrowTypeEnum::Utf8(EmptyMessage {}),
-            DataType::LargeUtf8 => ArrowTypeEnum::LargeUtf8(EmptyMessage {}),
-            DataType::List(item_type) => ArrowTypeEnum::List(Box::new(protobuf::List {
-                field_type: Some(Box::new(item_type.as_ref().into())),
-            })),
-            DataType::FixedSizeList(item_type, size) => {
-                ArrowTypeEnum::FixedSizeList(Box::new(protobuf::FixedSizeList {
-                    field_type: Some(Box::new(item_type.as_ref().into())),
-                    list_size: *size,
-                }))
-            }
-            DataType::LargeList(item_type) => {
-                ArrowTypeEnum::LargeList(Box::new(protobuf::List {
-                    field_type: Some(Box::new(item_type.as_ref().into())),
-                }))
-            }
-            DataType::Struct(struct_fields) => ArrowTypeEnum::Struct(protobuf::Struct {
-                sub_field_types: struct_fields
-                    .iter()
-                    .map(|field| field.into())
-                    .collect::<Vec<_>>(),
-            }),
-            DataType::Union(union_types) => ArrowTypeEnum::Union(protobuf::Union {
-                union_types: union_types
-                    .iter()
-                    .map(|field| field.into())
-                    .collect::<Vec<_>>(),
-            }),
-            DataType::Dictionary(key_type, value_type) => {
-                ArrowTypeEnum::Dictionary(Box::new(protobuf::Dictionary {
-                    key: Some(Box::new(key_type.as_ref().into())),
-                    value: Some(Box::new(value_type.as_ref().into())),
-                }))
-            }
-            DataType::Decimal(whole, fractional) => {
-                ArrowTypeEnum::Decimal(protobuf::Decimal {
-                    whole: *whole as u64,
-                    fractional: *fractional as u64,
-                })
-            }
-        }
-    }
-}
-
-// Returns true if the datatype is a valid scalar type; does not check whether list subtypes are valid.
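-// (Illustrative: Float32, Utf8, or Date32 return true; Binary, Struct, or Decimal
-// fall through to the catch-all arm and return false.)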
-fn is_valid_scalar_type_no_list_check(datatype: &arrow::datatypes::DataType) -> bool {
-    match datatype {
-        DataType::Boolean
-        | DataType::Int8
-        | DataType::Int16
-        | DataType::Int32
-        | DataType::Int64
-        | DataType::UInt8
-        | DataType::UInt16
-        | DataType::UInt32
-        | DataType::UInt64
-        | DataType::Float32
-        | DataType::Float64
-        | DataType::LargeUtf8
-        | DataType::Utf8
-        | DataType::Date32 => true,
-        DataType::Time64(time_unit) => matches!(
-            time_unit,
-            arrow::datatypes::TimeUnit::Microsecond
-                | arrow::datatypes::TimeUnit::Nanosecond
-        ),
-
-        DataType::List(_) => true,
-        _ => false,
-    }
-}
-
-impl TryFrom<&arrow::datatypes::DataType> for protobuf::scalar_type::Datatype {
-    type Error = BallistaError;
-    fn try_from(val: &arrow::datatypes::DataType) -> Result<Self, Self::Error> {
-        use protobuf::scalar_type;
-        use protobuf::Field;
-        use protobuf::{List, PrimitiveScalarType};
-        let scalar_value = match val {
-            DataType::Boolean => scalar_type::Datatype::Scalar(PrimitiveScalarType::Bool as i32),
-            DataType::Int8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int8 as i32),
-            DataType::Int16 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int16 as i32),
-            DataType::Int32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int32 as i32),
-            DataType::Int64 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int64 as i32),
-            DataType::UInt8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint8 as i32),
-            DataType::UInt16 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint16 as i32),
-            DataType::UInt32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint32 as i32),
-            DataType::UInt64 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint64 as i32),
-            DataType::Float32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Float32 as i32),
-            DataType::Float64 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Float64 as i32),
-            DataType::Date32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Date32 as i32),
-            DataType::Time64(time_unit) => match time_unit {
-                arrow::datatypes::TimeUnit::Microsecond => scalar_type::Datatype::Scalar(PrimitiveScalarType::TimeMicrosecond as i32),
-                arrow::datatypes::TimeUnit::Nanosecond => scalar_type::Datatype::Scalar(PrimitiveScalarType::TimeNanosecond as i32),
-                _ => {
-                    return Err(proto_error(format!(
-                        "Found invalid time unit for scalar value, only TimeUnit::Microsecond and TimeUnit::Nanosecond are valid time units: {:?}",
-                        time_unit
-                    )))
-                }
-            },
-            DataType::Utf8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Utf8 as i32),
-            DataType::LargeUtf8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::LargeUtf8 as i32),
-            DataType::List(field_type) => {
-                let mut field_names: Vec<String> = Vec::new();
-                let mut curr_field: &arrow::datatypes::Field = field_type.as_ref();
-                field_names.push(curr_field.name().to_owned());
-                // Walk each nested field and check its datatype: DataFusion scalars only
-                // support recursive lists that terminate in a leaf scalar type, so any
-                // other compound type is an error.
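-                // Hypothetical example: List("l1", List("l2", Float32)) walks to
-                // field_names = ["l1", "l2"] with Float32 as the leaf type, while a
-                // Binary or Struct leaf fails the check below and is rejected.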
-
-                while let DataType::List(nested_field_type) = curr_field.data_type() {
-                    curr_field = nested_field_type.as_ref();
-                    field_names.push(curr_field.name().to_owned());
-                    if !is_valid_scalar_type_no_list_check(curr_field.data_type()) {
-                        return Err(proto_error(format!("{:?} is an invalid scalar type", curr_field)));
-                    }
-                }
-                let deepest_datatype = curr_field.data_type();
-                if !is_valid_scalar_type_no_list_check(deepest_datatype) {
-                    return Err(proto_error(format!("The list nested type {:?} is an invalid scalar type", curr_field)));
-                }
-                let pb_deepest_type: PrimitiveScalarType = match deepest_datatype {
-                    DataType::Boolean => PrimitiveScalarType::Bool,
-                    DataType::Int8 => PrimitiveScalarType::Int8,
-                    DataType::Int16 => PrimitiveScalarType::Int16,
-                    DataType::Int32 => PrimitiveScalarType::Int32,
-                    DataType::Int64 => PrimitiveScalarType::Int64,
-                    DataType::UInt8 => PrimitiveScalarType::Uint8,
-                    DataType::UInt16 => PrimitiveScalarType::Uint16,
-                    DataType::UInt32 => PrimitiveScalarType::Uint32,
-                    DataType::UInt64 => PrimitiveScalarType::Uint64,
-                    DataType::Float32 => PrimitiveScalarType::Float32,
-                    DataType::Float64 => PrimitiveScalarType::Float64,
-                    DataType::Date32 => PrimitiveScalarType::Date32,
-                    DataType::Time64(time_unit) => match time_unit {
-                        arrow::datatypes::TimeUnit::Microsecond => PrimitiveScalarType::TimeMicrosecond,
-                        arrow::datatypes::TimeUnit::Nanosecond => PrimitiveScalarType::TimeNanosecond,
-                        _ => {
-                            return Err(proto_error(format!(
-                                "Found invalid time unit for scalar value, only TimeUnit::Microsecond and TimeUnit::Nanosecond are valid time units: {:?}",
-                                time_unit
-                            )))
-                        }
-                    },
-
-                    DataType::Utf8 => PrimitiveScalarType::Utf8,
-                    DataType::LargeUtf8 => PrimitiveScalarType::LargeUtf8,
-                    _ => {
-                        return Err(proto_error(format!(
-                            "Error converting to Datatype to scalar type, {:?} is invalid as a datafusion scalar.",
-                            val
-                        )))
-                    }
-                };
-                protobuf::scalar_type::Datatype::List(protobuf::ScalarListType {
-                    field_names,
-                    deepest_type: pb_deepest_type as i32,
-                })
-            }
-            DataType::Null
-            | DataType::Float16
-            | DataType::Timestamp(_, _)
-            | DataType::Date64
-            | DataType::Time32(_)
-            | DataType::Duration(_)
-            | DataType::Interval(_)
-            | DataType::Binary
-            | DataType::FixedSizeBinary(_)
-            | DataType::LargeBinary
-            | DataType::FixedSizeList(_, _)
-            | DataType::LargeList(_)
-            | DataType::Struct(_)
-            | DataType::Union(_)
-            | DataType::Dictionary(_, _)
-            | DataType::Decimal(_, _) => {
-                return Err(proto_error(format!(
-                    "Error converting to Datatype to scalar type, {:?} is invalid as a datafusion scalar.",
-                    val
-                )))
-            }
-        };
-        Ok(scalar_value)
-    }
-}
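-// In short: primitive scalar types encode as a `PrimitiveScalarType` enum value,
-// while (possibly nested) list types flatten into a `ScalarListType` carrying the
-// field name from each nesting level plus the primitive leaf type.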
-
-impl TryFrom<&datafusion::scalar::ScalarValue> for protobuf::ScalarValue {
-    type Error = BallistaError;
-    fn try_from(
-        val: &datafusion::scalar::ScalarValue,
-    ) -> Result<protobuf::ScalarValue, Self::Error> {
-        use datafusion::scalar;
-        use protobuf::scalar_value::Value;
-        use protobuf::PrimitiveScalarType;
-        let scalar_val = match val {
-            scalar::ScalarValue::Boolean(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Bool, |s| Value::BoolValue(*s))
-            }
-            scalar::ScalarValue::Float32(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Float32, |s| {
-                    Value::Float32Value(*s)
-                })
-            }
-            scalar::ScalarValue::Float64(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Float64, |s| {
-                    Value::Float64Value(*s)
-                })
-            }
-            scalar::ScalarValue::Int8(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Int8, |s| {
-                    Value::Int8Value(*s as i32)
-                })
-            }
-            scalar::ScalarValue::Int16(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Int16, |s| {
-                    Value::Int16Value(*s as i32)
-                })
-            }
-            scalar::ScalarValue::Int32(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Int32, |s| Value::Int32Value(*s))
-            }
-            scalar::ScalarValue::Int64(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Int64, |s| Value::Int64Value(*s))
-            }
-            scalar::ScalarValue::UInt8(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Uint8, |s| {
-                    Value::Uint8Value(*s as u32)
-                })
-            }
-            scalar::ScalarValue::UInt16(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Uint16, |s| {
-                    Value::Uint16Value(*s as u32)
-                })
-            }
-            scalar::ScalarValue::UInt32(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Uint32, |s| Value::Uint32Value(*s))
-            }
-            scalar::ScalarValue::UInt64(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Uint64, |s| Value::Uint64Value(*s))
-            }
-            scalar::ScalarValue::Utf8(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Utf8, |s| {
-                    Value::Utf8Value(s.to_owned())
-                })
-            }
-            scalar::ScalarValue::LargeUtf8(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::LargeUtf8, |s| {
-                    Value::LargeUtf8Value(s.to_owned())
-                })
-            }
-            scalar::ScalarValue::List(value, datatype) => {
-                println!("Current datatype of list: {:?}", datatype);
-                match value {
-                    Some(values) => {
-                        if values.is_empty() {
-                            protobuf::ScalarValue {
-                                value: Some(protobuf::scalar_value::Value::ListValue(
-                                    protobuf::ScalarListValue {
-                                        datatype: Some(datatype.try_into()?),
-                                        values: Vec::new(),
-                                    },
-                                )),
-                            }
-                        } else {
-                            let scalar_type = match datatype {
-                                DataType::List(field) => field.as_ref().data_type(),
-                                _ => todo!("Proper error handling"),
-                            };
-                            println!("Current scalar type for list: {:?}", scalar_type);
-                            let type_checked_values: Vec<protobuf::ScalarValue> = values
-                                .iter()
-                                .map(|scalar| match (scalar, scalar_type) {
-                                    (scalar::ScalarValue::List(_, arrow::datatypes::DataType::List(list_field)), arrow::datatypes::DataType::List(field)) => {
-                                        let scalar_datatype = field.data_type();
-                                        let list_datatype = list_field.data_type();
-                                        if std::mem::discriminant(list_datatype) != std::mem::discriminant(scalar_datatype) {
-                                            return Err(proto_error(format!(
-                                                "Protobuf serialization error: Lists with inconsistent typing {:?} and {:?} found within list",
-                                                list_datatype, scalar_datatype
-                                            )));
-                                        }
-                                        scalar.try_into()
-                                    }
-                                    (scalar::ScalarValue::Boolean(_), arrow::datatypes::DataType::Boolean) => scalar.try_into(),
-                                    (scalar::ScalarValue::Float32(_), arrow::datatypes::DataType::Float32) => scalar.try_into(),
-                                    (scalar::ScalarValue::Float64(_), arrow::datatypes::DataType::Float64) => scalar.try_into(),
-                                    (scalar::ScalarValue::Int8(_), arrow::datatypes::DataType::Int8) => scalar.try_into(),
-                                    (scalar::ScalarValue::Int16(_), arrow::datatypes::DataType::Int16) => scalar.try_into(),
-                                    (scalar::ScalarValue::Int32(_), arrow::datatypes::DataType::Int32) => scalar.try_into(),
-                                    (scalar::ScalarValue::Int64(_), arrow::datatypes::DataType::Int64) => scalar.try_into(),
-                                    (scalar::ScalarValue::UInt8(_), arrow::datatypes::DataType::UInt8) => scalar.try_into(),
-                                    (scalar::ScalarValue::UInt16(_), arrow::datatypes::DataType::UInt16) => scalar.try_into(),
-                                    (scalar::ScalarValue::UInt32(_), arrow::datatypes::DataType::UInt32) => scalar.try_into(),
-                                    (scalar::ScalarValue::UInt64(_), arrow::datatypes::DataType::UInt64) => scalar.try_into(),
-                                    (scalar::ScalarValue::Utf8(_), arrow::datatypes::DataType::Utf8) => scalar.try_into(),
-                                    (scalar::ScalarValue::LargeUtf8(_), arrow::datatypes::DataType::LargeUtf8) => scalar.try_into(),
-                                    _ => Err(proto_error(format!(
-                                        "Protobuf serialization error, {:?} was inconsistent with designated type {:?}",
-                                        scalar, datatype
-                                    ))),
-                                })
-                                .collect::<Result<Vec<_>, _>>()?;
-                            protobuf::ScalarValue {
-                                value: Some(protobuf::scalar_value::Value::ListValue(
-                                    protobuf::ScalarListValue {
-                                        datatype: Some(datatype.try_into()?),
-                                        values: type_checked_values,
-                                    },
-                                )),
-                            }
-                        }
-                    }
-                    None => protobuf::ScalarValue {
-                        value: Some(protobuf::scalar_value::Value::NullListValue(
-                            datatype.try_into()?,
-                        )),
-                    },
-                }
-            }
-            datafusion::scalar::ScalarValue::Date32(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::Date32, |s| Value::Date32Value(*s))
-            }
-            datafusion::scalar::ScalarValue::TimestampMicrosecond(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::TimeMicrosecond, |s| {
-                    Value::TimeMicrosecondValue(*s)
-                })
-            }
-            datafusion::scalar::ScalarValue::TimestampNanosecond(val) => {
-                create_proto_scalar(val, PrimitiveScalarType::TimeNanosecond, |s| {
-                    Value::TimeNanosecondValue(*s)
-                })
-            }
-            _ => {
-                return Err(proto_error(format!(
-                    "Error converting Datatype to scalar type: {:?} is invalid as a DataFusion scalar.",
-                    val
-                )))
-            }
-        };
-        Ok(scalar_val)
-    }
-}
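A hedged usage sketch of the impl that ends above (its header sits earlier in this file; from the `Ok(scalar_val)` tail and the match arms it is evidently `TryInto<protobuf::ScalarValue> for &datafusion::scalar::ScalarValue`), written as it would appear inside any function returning `Result<_, BallistaError>`:

    use std::convert::TryInto;
    use datafusion::scalar::ScalarValue;

    let v = ScalarValue::Utf8(Some("hello".to_string()));
    // Delegates to the Utf8 arm of the match above.
    let pb: protobuf::ScalarValue = (&v).try_into()?;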
-
-impl TryInto<protobuf::LogicalPlanNode> for &LogicalPlan {
-    type Error = BallistaError;
-
-    fn try_into(self) -> Result<protobuf::LogicalPlanNode, Self::Error> {
-        use protobuf::logical_plan_node::LogicalPlanType;
-        match self {
-            LogicalPlan::TableScan {
-                table_name,
-                source,
-                filters,
-                projection,
-                ..
-            } => {
-                let schema = source.schema();
-
-                // unwrap the DFTableAdapter to get to the real TableProvider
-                let source = if let Some(adapter) =
-                    source.as_any().downcast_ref::<DFTableAdapter>()
-                {
-                    match &adapter.logical_plan {
-                        LogicalPlan::TableScan { source, .. } => Ok(source.as_any()),
-                        _ => Err(BallistaError::General(
-                            "Invalid LogicalPlan::TableScan".to_owned(),
-                        )),
-                    }
-                } else {
-                    Ok(source.as_any())
-                }?;
-
-                let projection = match projection {
-                    None => None,
-                    Some(columns) => {
-                        let column_names = columns
-                            .iter()
-                            .map(|i| schema.field(*i).name().to_owned())
-                            .collect();
-                        Some(protobuf::ProjectionColumns {
-                            columns: column_names,
-                        })
-                    }
-                };
-                let schema: protobuf::Schema = schema.as_ref().into();
-
-                let filters: Vec<protobuf::LogicalExprNode> = filters
-                    .iter()
-                    .map(|filter| filter.try_into())
-                    .collect::<Result<Vec<_>, _>>()?;
-
-                if let Some(parquet) = source.downcast_ref::<ParquetTable>() {
-                    Ok(protobuf::LogicalPlanNode {
-                        logical_plan_type: Some(LogicalPlanType::ParquetScan(
-                            protobuf::ParquetTableScanNode {
-                                table_name: table_name.to_owned(),
-                                path: parquet.path().to_owned(),
-                                projection,
-                                schema: Some(schema),
-                                filters,
-                            },
-                        )),
-                    })
-                } else if let Some(csv) = source.downcast_ref::<CsvFile>() {
-                    let delimiter = [csv.delimiter()];
-                    let delimiter = std::str::from_utf8(&delimiter).map_err(|_| {
-                        BallistaError::General("Invalid CSV delimiter".to_owned())
-                    })?;
-                    Ok(protobuf::LogicalPlanNode {
-                        logical_plan_type: Some(LogicalPlanType::CsvScan(
-                            protobuf::CsvTableScanNode {
-                                table_name: table_name.to_owned(),
-                                path: csv.path().to_owned(),
-                                projection,
-                                schema: Some(schema),
-                                has_header: csv.has_header(),
-                                delimiter: delimiter.to_string(),
-                                file_extension: csv.file_extension().to_string(),
-                                filters,
-                            },
-                        )),
-                    })
-                } else {
-                    Err(BallistaError::General(format!(
-                        "logical plan to_proto unsupported table provider {:?}",
-                        source
-                    )))
-                }
-            }
-            LogicalPlan::Projection { expr, input, .. } => {
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Projection(Box::new(
-                        protobuf::ProjectionNode {
-                            input: Some(Box::new(input.as_ref().try_into()?)),
-                            expr: expr
-                                .iter()
-                                .map(|expr| expr.try_into())
-                                .collect::<Result<Vec<_>, BallistaError>>()?,
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Filter { predicate, input } => {
-                let input: protobuf::LogicalPlanNode = input.as_ref().try_into()?;
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Selection(Box::new(
-                        protobuf::SelectionNode {
-                            input: Some(Box::new(input)),
-                            expr: Some(predicate.try_into()?),
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Aggregate {
-                input,
-                group_expr,
-                aggr_expr,
-                ..
-            } => {
-                let input: protobuf::LogicalPlanNode = input.as_ref().try_into()?;
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Aggregate(Box::new(
-                        protobuf::AggregateNode {
-                            input: Some(Box::new(input)),
-                            group_expr: group_expr
-                                .iter()
-                                .map(|expr| expr.try_into())
-                                .collect::<Result<Vec<_>, BallistaError>>()?,
-                            aggr_expr: aggr_expr
-                                .iter()
-                                .map(|expr| expr.try_into())
-                                .collect::<Result<Vec<_>, BallistaError>>()?,
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Join {
-                left,
-                right,
-                on,
-                join_type,
-                ..
-            } => {
-                let left: protobuf::LogicalPlanNode = left.as_ref().try_into()?;
-                let right: protobuf::LogicalPlanNode = right.as_ref().try_into()?;
-                let join_type = match join_type {
-                    JoinType::Inner => protobuf::JoinType::Inner,
-                    JoinType::Left => protobuf::JoinType::Left,
-                    JoinType::Right => protobuf::JoinType::Right,
-                };
-                let left_join_column = on.iter().map(|(l, _)| l.to_owned()).collect();
-                let right_join_column = on.iter().map(|(_, r)| r.to_owned()).collect();
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Join(Box::new(
-                        protobuf::JoinNode {
-                            left: Some(Box::new(left)),
-                            right: Some(Box::new(right)),
-                            join_type: join_type.into(),
-                            left_join_column,
-                            right_join_column,
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Limit { input, n } => {
-                let input: protobuf::LogicalPlanNode = input.as_ref().try_into()?;
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Limit(Box::new(
-                        protobuf::LimitNode {
-                            input: Some(Box::new(input)),
-                            limit: *n as u32,
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Sort { input, expr } => {
-                let input: protobuf::LogicalPlanNode = input.as_ref().try_into()?;
-                let selection_expr: Vec<protobuf::LogicalExprNode> = expr
-                    .iter()
-                    .map(|expr| expr.try_into())
-                    .collect::<Result<Vec<_>, BallistaError>>()?;
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Sort(Box::new(
-                        protobuf::SortNode {
-                            input: Some(Box::new(input)),
-                            expr: selection_expr,
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Repartition {
-                input,
-                partitioning_scheme,
-            } => {
-                use datafusion::logical_plan::Partitioning;
-                let input: protobuf::LogicalPlanNode = input.as_ref().try_into()?;
-
-                // The common usize field is assumed to be the batch size.
-                // u64 avoids any nastiness with large values; most clusters are uniformly 64-bit anyway.
-                use protobuf::repartition_node::PartitionMethod;
-
-                let pb_partition_method = match partitioning_scheme {
-                    Partitioning::Hash(exprs, partition_count) => {
-                        PartitionMethod::Hash(protobuf::HashRepartition {
-                            hash_expr: exprs
-                                .iter()
-                                .map(|expr| expr.try_into())
-                                .collect::<Result<Vec<_>, BallistaError>>()?,
-                            partition_count: *partition_count as u64,
-                        })
-                    }
-                    Partitioning::RoundRobinBatch(batch_size) => {
-                        PartitionMethod::RoundRobin(*batch_size as u64)
-                    }
-                };
-
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Repartition(Box::new(
-                        protobuf::RepartitionNode {
-                            input: Some(Box::new(input)),
-                            partition_method: Some(pb_partition_method),
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::EmptyRelation {
-                produce_one_row, ..
-            } => Ok(protobuf::LogicalPlanNode {
-                logical_plan_type: Some(LogicalPlanType::EmptyRelation(
-                    protobuf::EmptyRelationNode {
-                        produce_one_row: *produce_one_row,
-                    },
-                )),
-            }),
-            LogicalPlan::CreateExternalTable {
-                name,
-                location,
-                file_type,
-                has_header,
-                schema: df_schema,
-            } => {
-                use datafusion::sql::parser::FileType;
-                let schema: Schema = df_schema.as_ref().clone().into();
-                let pb_schema: protobuf::Schema = (&schema).try_into().map_err(|e| {
-                    BallistaError::General(format!(
-                        "Could not convert schema into protobuf: {:?}",
-                        e
-                    ))
-                })?;
-
-                let pb_file_type: protobuf::FileType = match file_type {
-                    FileType::NdJson => protobuf::FileType::NdJson,
-                    FileType::Parquet => protobuf::FileType::Parquet,
-                    FileType::CSV => protobuf::FileType::Csv,
-                };
-
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::CreateExternalTable(
-                        protobuf::CreateExternalTableNode {
-                            name: name.clone(),
-                            location: location.clone(),
-                            file_type: pb_file_type as i32,
-                            has_header: *has_header,
-                            schema: Some(pb_schema),
-                        },
-                    )),
-                })
-            }
-            LogicalPlan::Explain { verbose, plan, .. } => {
-                let input: protobuf::LogicalPlanNode = plan.as_ref().try_into()?;
-                Ok(protobuf::LogicalPlanNode {
-                    logical_plan_type: Some(LogicalPlanType::Explain(Box::new(
-                        protobuf::ExplainNode {
-                            input: Some(Box::new(input)),
-                            verbose: *verbose,
-                        },
-                    ))),
-                })
-            }
-            LogicalPlan::Extension { .. } => unimplemented!(),
-            LogicalPlan::Union { .. } => unimplemented!(),
-        }
-    }
-}
-
-fn create_proto_scalar<I, T: FnOnce(&I) -> protobuf::scalar_value::Value>(
-    v: &Option<I>,
-    null_arrow_type: protobuf::PrimitiveScalarType,
-    constructor: T,
-) -> protobuf::ScalarValue {
-    protobuf::ScalarValue {
-        value: Some(v.as_ref().map(constructor).unwrap_or_else(|| {
-            protobuf::scalar_value::Value::NullValue(null_arrow_type as i32)
-        })),
-    }
-}
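For illustration, a minimal sketch of how `create_proto_scalar` is driven, mirroring the `Date32` arm earlier in this file (the `val` binding is illustrative; Date32 wraps an `i32` day offset):

    // Some(18_000) yields Value::Date32Value(18_000);
    // None yields Value::NullValue(PrimitiveScalarType::Date32 as i32).
    let val: Option<i32> = Some(18_000);
    let pb: protobuf::ScalarValue =
        create_proto_scalar(&val, PrimitiveScalarType::Date32, |s| {
            Value::Date32Value(*s)
        });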
-
-impl TryInto<protobuf::LogicalExprNode> for &Expr {
-    type Error = BallistaError;
-
-    fn try_into(self) -> Result<protobuf::LogicalExprNode, Self::Error> {
-        use datafusion::scalar::ScalarValue;
-        use protobuf::scalar_value::Value;
-        match self {
-            Expr::Column(name) => {
-                let expr = protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::ColumnName(name.clone())),
-                };
-                Ok(expr)
-            }
-            Expr::Alias(expr, alias) => {
-                let alias = Box::new(protobuf::AliasNode {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                    alias: alias.to_owned(),
-                });
-                let expr = protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::Alias(alias)),
-                };
-                Ok(expr)
-            }
-            Expr::Literal(value) => {
-                let pb_value: protobuf::ScalarValue = value.try_into()?;
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::Literal(pb_value)),
-                })
-            }
-            Expr::BinaryExpr { left, op, right } => {
-                let binary_expr = Box::new(protobuf::BinaryExprNode {
-                    l: Some(Box::new(left.as_ref().try_into()?)),
-                    r: Some(Box::new(right.as_ref().try_into()?)),
-                    op: format!("{:?}", op),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::BinaryExpr(binary_expr)),
-                })
-            }
-            Expr::AggregateFunction {
-                ref fun, ref args, ..
-            } => {
-                let aggr_function = match fun {
-                    AggregateFunction::Min => protobuf::AggregateFunction::Min,
-                    AggregateFunction::Max => protobuf::AggregateFunction::Max,
-                    AggregateFunction::Sum => protobuf::AggregateFunction::Sum,
-                    AggregateFunction::Avg => protobuf::AggregateFunction::Avg,
-                    AggregateFunction::Count => protobuf::AggregateFunction::Count,
-                };
-
-                let arg = &args[0];
-                let aggregate_expr = Box::new(protobuf::AggregateExprNode {
-                    aggr_function: aggr_function.into(),
-                    expr: Some(Box::new(arg.try_into()?)),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::AggregateExpr(aggregate_expr)),
-                })
-            }
-            Expr::ScalarVariable(_) => unimplemented!(),
-            Expr::ScalarFunction { ref fun, ref args } => {
-                let fun: protobuf::ScalarFunction = fun.try_into()?;
-                let expr: Vec<protobuf::LogicalExprNode> = args
-                    .iter()
-                    .map(|e| e.try_into())
-                    .collect::<Result<Vec<protobuf::LogicalExprNode>, BallistaError>>()?;
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(
-                        protobuf::logical_expr_node::ExprType::ScalarFunction(
-                            protobuf::ScalarFunctionNode {
-                                fun: fun.into(),
-                                expr,
-                            },
-                        ),
-                    ),
-                })
-            }
-            Expr::ScalarUDF { .. } => unimplemented!(),
-            Expr::AggregateUDF { .. } => unimplemented!(),
-            Expr::Not(expr) => {
-                let expr = Box::new(protobuf::Not {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::NotExpr(expr)),
-                })
-            }
-            Expr::IsNull(expr) => {
-                let expr = Box::new(protobuf::IsNull {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::IsNullExpr(expr)),
-                })
-            }
-            Expr::IsNotNull(expr) => {
-                let expr = Box::new(protobuf::IsNotNull {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::IsNotNullExpr(expr)),
-                })
-            }
-            Expr::Between {
-                expr,
-                negated,
-                low,
-                high,
-            } => {
-                let expr = Box::new(protobuf::BetweenNode {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                    negated: *negated,
-                    low: Some(Box::new(low.as_ref().try_into()?)),
-                    high: Some(Box::new(high.as_ref().try_into()?)),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::Between(expr)),
-                })
-            }
-            Expr::Case {
-                expr,
-                when_then_expr,
-                else_expr,
-            } => {
-                let when_then_expr = when_then_expr
-                    .iter()
-                    .map(|(w, t)| {
-                        Ok(protobuf::WhenThen {
-                            when_expr: Some(w.as_ref().try_into()?),
-                            then_expr: Some(t.as_ref().try_into()?),
-                        })
-                    })
-                    .collect::<Result<Vec<protobuf::WhenThen>, BallistaError>>()?;
-                let expr = Box::new(protobuf::CaseNode {
-                    expr: match expr {
-                        Some(e) => Some(Box::new(e.as_ref().try_into()?)),
-                        None => None,
-                    },
-                    when_then_expr,
-                    else_expr: match else_expr {
-                        Some(e) => Some(Box::new(e.as_ref().try_into()?)),
-                        None => None,
-                    },
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::Case(expr)),
-                })
-            }
-            Expr::Cast { expr, data_type } => {
-                let expr = Box::new(protobuf::CastNode {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                    arrow_type: Some(data_type.into()),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::Cast(expr)),
-                })
-            }
-            Expr::Sort {
-                expr,
-                asc,
-                nulls_first,
-            } => {
-                let expr = Box::new(protobuf::SortExprNode {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                    asc: *asc,
-                    nulls_first: *nulls_first,
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(ExprType::Sort(expr)),
-                })
-            }
-            Expr::Negative(expr) => {
-                let expr = Box::new(protobuf::NegativeNode {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(protobuf::logical_expr_node::ExprType::Negative(
-                        expr,
-                    )),
-                })
-            }
-            Expr::InList {
-                expr,
-                list,
-                negated,
-            } => {
-                let expr = Box::new(protobuf::InListNode {
-                    expr: Some(Box::new(expr.as_ref().try_into()?)),
-                    list: list
-                        .iter()
-                        .map(|expr| expr.try_into())
-                        .collect::<Result<Vec<_>, BallistaError>>()?,
-                    negated: *negated,
-                });
-                Ok(protobuf::LogicalExprNode {
-                    expr_type: Some(protobuf::logical_expr_node::ExprType::InList(expr)),
-                })
-            }
-            Expr::Wildcard => Ok(protobuf::LogicalExprNode {
-                expr_type: Some(protobuf::logical_expr_node::ExprType::Wildcard(true)),
-            }),
-            Expr::TryCast { .. } => unimplemented!(),
-        }
-    }
-}
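A short sketch of exercising the expression conversion above (hedged; in this version of DataFusion `Expr::Column` wraps a plain `String`, as the first match arm shows), again inside a function returning `Result<_, BallistaError>`:

    use std::convert::TryInto;
    use datafusion::logical_plan::Expr;

    let expr = Expr::Column("a".to_owned());
    let node: protobuf::LogicalExprNode = (&expr).try_into()?;
    // node.expr_type is now Some(ExprType::ColumnName("a".to_string())).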
-
-impl Into<protobuf::Schema> for &Schema {
-    fn into(self) -> protobuf::Schema {
-        protobuf::Schema {
-            columns: self
-                .fields()
-                .iter()
-                .map(protobuf::Field::from)
-                .collect::<Vec<_>>(),
-        }
-    }
-}
-
-impl TryFrom<&arrow::datatypes::DataType> for protobuf::ScalarType {
-    type Error = BallistaError;
-    fn try_from(value: &arrow::datatypes::DataType) -> Result<Self, Self::Error> {
-        let datatype = protobuf::scalar_type::Datatype::try_from(value)?;
-        Ok(protobuf::ScalarType {
-            datatype: Some(datatype),
-        })
-    }
-}
-
-impl TryInto<protobuf::ScalarFunction> for &BuiltinScalarFunction {
-    type Error = BallistaError;
-    fn try_into(self) -> Result<protobuf::ScalarFunction, Self::Error> {
-        match self {
-            BuiltinScalarFunction::Sqrt => Ok(protobuf::ScalarFunction::Sqrt),
-            BuiltinScalarFunction::Sin => Ok(protobuf::ScalarFunction::Sin),
-            BuiltinScalarFunction::Cos => Ok(protobuf::ScalarFunction::Cos),
-            BuiltinScalarFunction::Tan => Ok(protobuf::ScalarFunction::Tan),
-            BuiltinScalarFunction::Asin => Ok(protobuf::ScalarFunction::Asin),
-            BuiltinScalarFunction::Acos => Ok(protobuf::ScalarFunction::Acos),
-            BuiltinScalarFunction::Atan => Ok(protobuf::ScalarFunction::Atan),
-            BuiltinScalarFunction::Exp => Ok(protobuf::ScalarFunction::Exp),
-            BuiltinScalarFunction::Log => Ok(protobuf::ScalarFunction::Log),
-            BuiltinScalarFunction::Log10 => Ok(protobuf::ScalarFunction::Log10),
-            BuiltinScalarFunction::Floor => Ok(protobuf::ScalarFunction::Floor),
-            BuiltinScalarFunction::Ceil => Ok(protobuf::ScalarFunction::Ceil),
-            BuiltinScalarFunction::Round => Ok(protobuf::ScalarFunction::Round),
-            BuiltinScalarFunction::Trunc => Ok(protobuf::ScalarFunction::Trunc),
-            BuiltinScalarFunction::Abs => Ok(protobuf::ScalarFunction::Abs),
-            BuiltinScalarFunction::OctetLength => {
-                Ok(protobuf::ScalarFunction::Octetlength)
-            }
-            BuiltinScalarFunction::Concat => Ok(protobuf::ScalarFunction::Concat),
-            BuiltinScalarFunction::Lower => Ok(protobuf::ScalarFunction::Lower),
-            BuiltinScalarFunction::Upper => Ok(protobuf::ScalarFunction::Upper),
-            BuiltinScalarFunction::Trim => Ok(protobuf::ScalarFunction::Trim),
-            BuiltinScalarFunction::Ltrim => Ok(protobuf::ScalarFunction::Ltrim),
-            BuiltinScalarFunction::Rtrim => Ok(protobuf::ScalarFunction::Rtrim),
-            BuiltinScalarFunction::ToTimestamp => {
-                Ok(protobuf::ScalarFunction::Totimestamp)
-            }
-            BuiltinScalarFunction::Array => Ok(protobuf::ScalarFunction::Array),
-            BuiltinScalarFunction::NullIf => Ok(protobuf::ScalarFunction::Nullif),
-            BuiltinScalarFunction::DateTrunc => Ok(protobuf::ScalarFunction::Datetrunc),
-            BuiltinScalarFunction::MD5 => Ok(protobuf::ScalarFunction::Md5),
-            BuiltinScalarFunction::SHA224 => Ok(protobuf::ScalarFunction::Sha224),
-            BuiltinScalarFunction::SHA256 => Ok(protobuf::ScalarFunction::Sha256),
-            BuiltinScalarFunction::SHA384 => Ok(protobuf::ScalarFunction::Sha384),
-            BuiltinScalarFunction::SHA512 => Ok(protobuf::ScalarFunction::Sha512),
-            _ => Err(BallistaError::General(format!(
-                "logical_plan::to_proto() unsupported scalar function {:?}",
-                self
-            ))),
-        }
-    }
-}
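Taken together, the TryInto impls in this file flatten a logical plan into protobuf messages, which prost can then encode to bytes. A minimal, hypothetical helper (the function name is illustrative; `encoded_len` and `encode` are standard `prost::Message` methods):

    use prost::Message;
    use std::convert::TryInto;

    fn logical_plan_to_bytes(plan: &LogicalPlan) -> Result<Vec<u8>, BallistaError> {
        let node: protobuf::LogicalPlanNode = plan.try_into()?;
        let mut buf = Vec::with_capacity(node.encoded_len());
        node.encode(&mut buf)
            .map_err(|e| BallistaError::General(format!("{:?}", e)))?;
        Ok(buf)
    }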
diff --git a/rust/ballista/rust/core/src/serde/mod.rs b/rust/ballista/rust/core/src/serde/mod.rs
deleted file mode 100644
index b961639..0000000
--- a/rust/ballista/rust/core/src/serde/mod.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! This crate contains code generated from the Ballista Protocol Buffer Definition as well
-//! as convenience code for interacting with the generated code.
-
-use std::{convert::TryInto, io::Cursor};
-
-use crate::{error::BallistaError, serde::scheduler::Action as BallistaAction};
-
-use prost::Message;
-
-// include the generated protobuf source as a submodule
-#[allow(clippy::all)]
-pub mod protobuf {
-    include!(concat!(env!("OUT_DIR"), "/ballista.protobuf.rs"));
-}
-
-pub mod logical_plan;
-pub mod physical_plan;
-pub mod scheduler;
-
-pub fn decode_protobuf(bytes: &[u8]) -> Result<BallistaAction, BallistaError> {
-    let mut buf = Cursor::new(bytes);
-
-    protobuf::Action::decode(&mut buf)
-        .map_err(|e| BallistaError::Internal(format!("{:?}", e)))
-        .and_then(|node| node.try_into())
-}
-
-pub(crate) fn proto_error<S: Into<String>>(message: S) -> BallistaError {
-    BallistaError::General(message.into())
-}
-
-#[macro_export]
-macro_rules! convert_required {
-    ($PB:expr) => {{
-        if let Some(field) = $PB.as_ref() {
-            field.try_into()
-        } else {
-            Err(proto_error("Missing required field in protobuf"))
-        }
-    }};
-}
-
-#[macro_export]
-macro_rules! convert_box_required {
-    ($PB:expr) => {{
-        if let Some(field) = $PB.as_ref() {
-            field.as_ref().try_into()
-        } else {
-            Err(proto_error("Missing required field in protobuf"))
-        }
-    }};
-}
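A hedged sketch of how the pieces in this module compose: `decode_protobuf` turns raw bytes into a `BallistaAction`, and the two macros unwrap the `Option` wrappers prost generates for message fields (the `node` binding and its field names below are illustrative):

    // Decoding an Action received over the wire:
    let action: BallistaAction = decode_protobuf(&bytes)?;

    // Inside a TryInto conversion, for an Option<T> message field:
    //     let schema: Schema = convert_required!(node.schema)?;
    // and for an Option<Box<T>> field:
    //     let input: Arc<dyn ExecutionPlan> = convert_box_required!(node.input)?;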
diff --git a/rust/ballista/rust/core/src/serde/physical_plan/from_proto.rs b/rust/ballista/rust/core/src/serde/physical_plan/from_proto.rs
deleted file mode 100644
index be0777d..0000000
--- a/rust/ballista/rust/core/src/serde/physical_plan/from_proto.rs
+++ /dev/null
@@ -1,398 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-//! Serde code to convert from protocol buffers to Rust data structures.
-
-use std::collections::HashMap;
-use std::convert::TryInto;
-use std::sync::Arc;
-
-use crate::error::BallistaError;
-use crate::execution_plans::{ShuffleReaderExec, UnresolvedShuffleExec};
-use crate::serde::protobuf::repartition_exec_node::PartitionMethod;
-use crate::serde::protobuf::LogicalExprNode;
-use crate::serde::scheduler::PartitionLocation;
-use crate::serde::{proto_error, protobuf};
-use crate::{convert_box_required, convert_required};
-
-use arrow::datatypes::{DataType, Schema, SchemaRef};
-use datafusion::catalog::catalog::{
-    CatalogList, CatalogProvider, MemoryCatalogList, MemoryCatalogProvider,
-};
-use datafusion::execution::context::{ExecutionConfig, ExecutionContextState};
-use datafusion::logical_plan::{DFSchema, Expr};
-use datafusion::physical_plan::aggregates::{create_aggregate_expr, AggregateFunction};
-use datafusion::physical_plan::expressions::col;
-use datafusion::physical_plan::hash_aggregate::{AggregateMode, HashAggregateExec};
-use datafusion::physical_plan::hash_join::PartitionMode;
-use datafusion::physical_plan::merge::MergeExec;
-use datafusion::physical_plan::planner::DefaultPhysicalPlanner;
-use datafusion::physical_plan::{
-    coalesce_batches::CoalesceBatchesExec,
-    csv::CsvExec,
-    empty::EmptyExec,
-    expressions::{Avg, Column, PhysicalSortExpr},
-    filter::FilterExec,
-    hash_join::HashJoinExec,
-    hash_utils::JoinType,
-    limit::{GlobalLimitExec, LocalLimitExec},
-    parquet::ParquetExec,
-    projection::ProjectionExec,
-    repartition::RepartitionExec,
-    sort::{SortExec, SortOptions},
-    Partitioning,
-};
-use datafusion::physical_plan::{AggregateExpr, ExecutionPlan, PhysicalExpr};
-use datafusion::prelude::CsvReadOptions;
-use log::debug;
-use protobuf::logical_expr_node::ExprType;
-use protobuf::physical_plan_node::PhysicalPlanType;
-
-impl TryInto<Arc<dyn ExecutionPlan>> for &protobuf::PhysicalPlanNode {
-    type Error = BallistaError;
-
-    fn try_into(self) -> Result<Arc<dyn ExecutionPlan>, Self::Error> {
-        let plan = self.physical_plan_type.as_ref().ok_or_else(|| {
-            proto_error(format!(
-                "physical_plan::from_proto() PhysicalPlanNode is missing a physical_plan_type: '{:?}'",
-                self
-            ))
-        })?;
-        match plan {
-            PhysicalPlanType::Projection(projection) => {
-                let input: Arc<dyn ExecutionPlan> =
-                    convert_box_required!(projection.input)?;
-                let exprs = projection
-                    .expr
-                    .iter()
-                    .zip(projection.expr_name.iter())
-                    .map(|(expr, name)| {
-                        compile_expr(expr, &input.schema()).map(|e| (e, name.to_string()))
-                    })
-                    .collect::<Result<Vec<_>, _>>()?;
-                Ok(Arc::new(ProjectionExec::try_new(exprs, input)?))
-            }
-            PhysicalPlanType::Filter(filter) => {
-                let input: Arc<dyn ExecutionPlan> = convert_box_required!(filter.input)?;
-                let predicate = compile_expr(
-                    filter.expr.as_ref().ok_or_else(|| {
-                        BallistaError::General(
-                            "filter (FilterExecNode) in PhysicalPlanNode is missing."
-                                .to_owned(),
-                        )
-                    })?,
-                    &input.schema(),
-                )?;
-                Ok(Arc::new(FilterExec::try_new(predicate, input)?))
-            }
-            PhysicalPlanType::CsvScan(scan) => {
-                let schema = Arc::new(convert_required!(scan.schema)?);
-                let options = CsvReadOptions::new()
-                    .has_header(scan.has_header)
-                    .file_extension(&scan.file_extension)
-                    .delimiter(scan.delimiter.as_bytes()[0])
-                    .schema(&schema);
-                let projection = scan.projection.iter().map(|i| *i as usize).collect();
-                Ok(Arc::new(CsvExec::try_new(
-                    &scan.path,
-                    options,
-                    Some(projection),
-                    scan.batch_size as usize,
-                    None,
-                )?))
-            }
-            PhysicalPlanType::ParquetScan(scan) => {
-                let projection = scan.projection.iter().map(|i| *i as usize).collect();
-                let filenames: Vec<&str> =
-                    scan.filename.iter().map(|s| s.as_str()).collect();
-                Ok(Arc::new(ParquetExec::try_from_files(
-                    &filenames,
-                    Some(projection),
-                    None,
-                    scan.batch_size as usize,
-                    scan.num_partitions as usize,
-                    None,
-                )?))
-            }
-            PhysicalPlanType::CoalesceBatches(coalesce_batches) => {
-                let input: Arc<dyn ExecutionPlan> =
-                    convert_box_required!(coalesce_batches.input)?;
-                Ok(Arc::new(CoalesceBatchesExec::new(
-                    input,
-                    coalesce_batches.target_batch_size as usize,
-                )))
-            }
-            PhysicalPlanType::Merge(merge) => {
-                let input: Arc<dyn ExecutionPlan> = convert_box_required!(merge.input)?;
-                Ok(Arc::new(MergeExec::new(input)))
-            }
-            PhysicalPlanType::Repartition(repart) => {
-                let input: Arc<dyn ExecutionPlan> = convert_box_required!(repart.input)?;
-                match repart.partition_method {
-                    Some(PartitionMethod::Hash(ref hash_part)) => {
-                        let expr = hash_part
-                            .hash_expr
-                            .iter()
-                            .map(|e| compile_expr(e, &input.schema()))
-                            .collect::<Result<Vec<Arc<dyn PhysicalExpr>>, _>>()?;
-
-                        Ok(Arc::new(RepartitionExec::try_new(
-                            input,
-                            Partitioning::Hash(
-                                expr,
-                                hash_part.partition_count.try_into().unwrap(),
-                            ),
-                        )?))
-                    }
-                    Some(PartitionMethod::RoundRobin(partition_count)) => {
-                        Ok(Arc::new(RepartitionExec::try_new(
-                            input,
-                            Partitioning::RoundRobinBatch(
-                                partition_count.try_into().unwrap(),
-                            ),
-                        )?))
-                    }
-                    Some(PartitionMethod::Unknown(partition_count)) => {
-                        Ok(Arc::new(RepartitionExec::try_new(
-                            input,
-                            Partitioning::UnknownPartitioning(
-                                partition_count.try_into().unwrap(),
-                            ),
-                        )?))
-                    }
-                    _ => Err(BallistaError::General(
-                        "Invalid partitioning scheme".to_owned(),
-                    )),
-                }
-            }
-            PhysicalPlanType::GlobalLimit(limit) => {
-                let input: Arc<dyn ExecutionPlan> = convert_box_required!(limit.input)?;
-                Ok(Arc::new(GlobalLimitExec::new(input, limit.limit as usize)))
-            }
-            PhysicalPlanType::LocalLimit(limit) => {
-                let input: Arc<dyn ExecutionPlan> = convert_box_required!(limit.input)?;
-                Ok(Arc::new(LocalLimitExec::new(input, limit.limit as usize)))
-            }
-            PhysicalPlanType::HashAggregate(hash_agg) => {
-                let input: Arc<dyn ExecutionPlan> =
-                    convert_box_required!(hash_agg.input)?;
-                let mode = protobuf::AggregateMode::from_i32(hash_agg.mode).ok_or_else(|| {
-                    proto_error(format!(
-                        "Received a HashAggregateNode message with unknown AggregateMode {}",
-                        hash_agg.mode
-                    ))
-                })?;
-                let agg_mode: AggregateMode = match mode {
-                    protobuf::AggregateMode::Partial => AggregateMode::Partial,
-                    protobuf::AggregateMode::Final => AggregateMode::Final,
-                };
-
-                let group = hash_agg
-                    .group_expr
-                    .iter()
-                    .zip(hash_agg.group_expr_name.iter())
-                    .map(|(expr, name)| {
-                        compile_expr(expr, &input.schema()).map(|e| (e, name.to_string()))
-                    })
-                    .collect::<Result<Vec<_>, _>>()?;
-
-                let logical_agg_expr: Vec<(Expr, String)> = hash_agg
-                    .aggr_expr
-                    .iter()
-                    .zip(hash_agg.aggr_expr_name.iter())
-                    .map(|(expr, name)| expr.try_into().map(|expr| (expr, name.clone())))
-                    .collect::<Result<Vec<_>, _>>()?;
-
-                let df_planner = DefaultPhysicalPlanner::default();
-                let catalog_list =
-                    Arc::new(MemoryCatalogList::new()) as Arc<dyn CatalogList>;
-                let ctx_state = ExecutionContextState {
-                    catalog_list,
-                    scalar_functions: Default::default(),
-                    var_provider: Default::default(),
-                    aggregate_functions: Default::default(),
-                    config: ExecutionConfig::new(),
-                };
-
-                let input_schema = hash_agg
-                    .input_schema
-                    .as_ref()
-                    .ok_or_else(|| {
-                        BallistaError::General(
-                            "input_schema in HashAggregateNode is missing.".to_owned(),
-                        )
-                    })?
-                    .clone();
-                let physical_schema: SchemaRef =
-                    SchemaRef::new((&input_schema).try_into()?);
-
-                let mut physical_aggr_expr = vec![];
-
-                for (expr, name) in &logical_agg_expr {
-                    match expr {
-                        Expr::AggregateFunction { fun, args, .. } => {
-                            let arg = df_planner
-                                .create_physical_expr(
-                                    &args[0],
-                                    &physical_schema,
-                                    &ctx_state,
-                                )
-                                .map_err(|e| {
-                                    BallistaError::General(format!("{:?}", e))
-                                })?;
-                            physical_aggr_expr.push(create_aggregate_expr(
-                                &fun,
-                                false,
-                                &[arg],
-                                &physical_schema,
-                                name.to_string(),
-                            )?);
-                        }
-                        _ => {
-                            return Err(BallistaError::General(
-                                "Invalid expression for HashAggregateExec".to_string(),
-                            ))
-                        }
-                    }
-                }
-                Ok(Arc::new(HashAggregateExec::try_new(
-                    agg_mode,
-                    group,
-                    physical_aggr_expr,
-                    input,
-                    Arc::new((&input_schema).try_into()?),
-                )?))
-            }
-            PhysicalPlanType::HashJoin(hashjoin) => {
-                let left: Arc<dyn ExecutionPlan> = convert_box_required!(hashjoin.left)?;
-                let right: Arc<dyn ExecutionPlan> =
-                    convert_box_required!(hashjoin.right)?;
-                let on: Vec<(String, String)> = hashjoin
-                    .on
-                    .iter()
-                    .map(|col| (col.left.clone(), col.right.clone()))
-                    .collect();
-                let join_type = protobuf::JoinType::from_i32(hashjoin.join_type)
-                    .ok_or_else(|| {
-                        proto_error(format!(
-                            "Received a HashJoinNode message with unknown JoinType {}",
-                            hashjoin.join_type
-                        ))
-                    })?;
-                let join_type = match join_type {
-                    protobuf::JoinType::Inner => JoinType::Inner,
-                    protobuf::JoinType::Left => JoinType::Left,
-                    protobuf::JoinType::Right => JoinType::Right,
-                };
-                Ok(Arc::new(HashJoinExec::try_new(
-                    left,
-                    right,
-                    &on,
-                    &join_type,
-                    PartitionMode::CollectLeft,
-                )?))
-            }
-            PhysicalPlanType::ShuffleReader(shuffle_reader) => {
-                let schema = Arc::new(convert_required!(shuffle_reader.schema)?);
-                let partition_location: Vec<PartitionLocation> = shuffle_reader
-                    .partition_location
-                    .iter()
-                    .map(|p| p.clone().try_into())
-                    .collect::<Result<Vec<_>, BallistaError>>()?;
-                let shuffle_reader =
-                    ShuffleReaderExec::try_new(partition_location, schema)?;
-                Ok(Arc::new(shuffle_reader))
-            }
-            PhysicalPlanType::Empty(empty) => {
-                let schema = Arc::new(convert_required!(empty.schema)?);
-                Ok(Arc::new(EmptyExec::new(empty.produce_one_row, schema)))
-            }
-            PhysicalPlanType::Sort(sort) => {
-                let input: Arc<dyn ExecutionPlan> = convert_box_required!(sort.input)?;
-                let exprs = sort
-                    .expr
-                    .iter()
-                    .map(|expr| {
-                        let expr = expr.expr_type.as_ref().ok_or_else(|| {
-                            proto_error(format!(
-                                "physical_plan::from_proto() Unexpected expr {:?}",
-                                self
-                            ))
-                        })?;
-                        if let protobuf::logical_expr_node::ExprType::Sort(sort_expr) = expr {
-                            let expr = sort_expr
-                                .expr
-                                .as_ref()
-                                .ok_or_else(|| {
-                                    proto_error(format!(
-                                        "physical_plan::from_proto() Unexpected sort expr {:?}",
-                                        self
-                                    ))
-                                })?
-                                .as_ref();
-                            Ok(PhysicalSortExpr {
-                                expr: compile_expr(expr, &input.schema())?,
-                                options: SortOptions {
-                                    descending: !sort_expr.asc,
-                                    nulls_first: sort_expr.nulls_first,
-                                },
-                            })
-                        } else {
-                            Err(BallistaError::General(format!(
-                                "physical_plan::from_proto() {:?}",
-                                self
-                            )))
-                        }
-                    })
-                    .collect::<Result<Vec<_>, _>>()?;
-                // Update concurrency here in the future
-                Ok(Arc::new(SortExec::try_new(exprs, input)?))
-            }
-            PhysicalPlanType::Unresolved(unresolved_shuffle) => {
-                let schema = Arc::new(convert_required!(unresolved_shuffle.schema)?);
-                Ok(Arc::new(UnresolvedShuffleExec {
-                    query_stage_ids: unresolved_shuffle
-                        .query_stage_ids
-                        .iter()
-                        .map(|id| *id as usize)
-                        .collect(),
-                    schema,
-                    partition_count: unresolved_shuffle.partition_count as usize,
-                }))
-            }
-        }
-    }
-}
-
-fn compile_expr(
-    expr: &protobuf::LogicalExprNode,
-    schema: &Schema,
-) -> Result<Arc<dyn PhysicalExpr>, BallistaError> {
-    let df_planner = DefaultPhysicalPlanner::default();
-    let catalog_list = Arc::new(MemoryCatalogList::new()) as Arc<dyn CatalogList>;
-    let state = ExecutionContextState {
-        catalog_list,
-        scalar_functions: HashMap::new(),
-        var_provider: HashMap::new(),
-        aggregate_functions: HashMap::new(),
-        config: ExecutionConfig::new(),
-    };
-    let expr: Expr = expr.try_into()?;
-    df_planner
-        .create_physical_expr(&expr, schema, &state)
-        .map_err(|e| BallistaError::General(format!("{:?}", e)))
-}
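The inverse of the serialization sketch after to_proto.rs above: bytes back to an executable plan. A hypothetical helper (name illustrative) that mirrors `decode_protobuf` in serde/mod.rs and then reuses the TryInto impl in this file:

    use prost::Message;
    use std::convert::TryInto;
    use std::io::Cursor;
    use std::sync::Arc;

    fn bytes_to_physical_plan(
        bytes: &[u8],
    ) -> Result<Arc<dyn ExecutionPlan>, BallistaError> {
        let node = protobuf::PhysicalPlanNode::decode(&mut Cursor::new(bytes))
            .map_err(|e| BallistaError::Internal(format!("{:?}", e)))?;
        (&node).try_into()
    }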
diff --git a/rust/ballista/rust/core/src/serde/physical_plan/mod.rs b/rust/ballista/rust/core/src/serde/physical_plan/mod.rs
deleted file mode 100644
index e7985cc..0000000
--- a/rust/ballista/rust/core/src/serde/physical_plan/mod.rs
+++ /dev/null
@@ -1,178 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-pub mod from_proto;
-pub mod to_proto;
-
-#[cfg(test)]
-mod roundtrip_tests {
-    use datafusion::physical_plan::hash_utils::JoinType;
-    use std::{convert::TryInto, sync::Arc};
-
-    use arrow::datatypes::{DataType, Schema};
-    use datafusion::physical_plan::ColumnarValue;
-    use datafusion::physical_plan::{
-        empty::EmptyExec,
-        expressions::{Avg, Column, PhysicalSortExpr},
-        hash_aggregate::{AggregateMode, HashAggregateExec},
-        hash_join::HashJoinExec,
-        limit::{GlobalLimitExec, LocalLimitExec},
-        sort::SortExec,
-        ExecutionPlan,
-    };
-    use datafusion::physical_plan::{
-        AggregateExpr, Distribution, Partitioning, PhysicalExpr,
-    };
-
-    use super::super::super::error::Result;
-    use super::super::protobuf;
-    use datafusion::physical_plan::hash_join::PartitionMode;
-
-    fn roundtrip_test(exec_plan: Arc<dyn ExecutionPlan>) -> Result<()> {
-        let proto: protobuf::PhysicalPlanNode = exec_plan.clone().try_into()?;
-        let result_exec_plan: Arc<dyn ExecutionPlan> = (&proto).try_into()?;
-        assert_eq!(
-            format!("{:?}", exec_plan),
-            format!("{:?}", result_exec_plan)
-        );
-        Ok(())
-    }
-
-    #[test]
-    fn roundtrip_empty() -> Result<()> {
-        roundtrip_test(Arc::new(EmptyExec::new(false, Arc::new(Schema::empty()))))
-    }
-
-    #[test]
-    fn roundtrip_local_limit() -> Result<()> {
-        roundtrip_test(Arc::new(LocalLimitExec::new(
-            Arc::new(EmptyExec::new(false, Arc::new(Schema::empty()))),
-            25,
-        )))
-    }
-
-    #[test]
-    fn roundtrip_global_limit() -> Result<()> {
-        roundtrip_test(Arc::new(GlobalLimitExec::new(
-            Arc::new(EmptyExec::new(false, Arc::new(Schema::empty()))),
-            25,
-        )))
-    }
-
-    #[test]
-    fn roundtrip_hash_join() -> Result<()> {
-        use arrow::datatypes::{DataType, Field, Schema};
-        let field_a = Field::new("col", DataType::Int64, false);
-        let schema_left = Schema::new(vec![field_a.clone()]);
-        let schema_right = Schema::new(vec![field_a]);
-
-        roundtrip_test(Arc::new(HashJoinExec::try_new(
-            Arc::new(EmptyExec::new(false, Arc::new(schema_left))),
-            Arc::new(EmptyExec::new(false, Arc::new(schema_right))),
-            &[("col".to_string(), "col".to_string())],
-            &JoinType::Inner,
-            PartitionMode::CollectLeft,
-        )?))
-    }
-
-    fn col(name: &str) -> Arc<dyn PhysicalExpr> {
-        Arc::new(Column::new(name))
-    }
-
-    #[test]
-    fn roundtrip_hash_aggregate() -> Result<()> {
-        use arrow::datatypes::{DataType, Field, Schema};
-        let groups: Vec<(Arc<dyn PhysicalExpr>, String)> =
-            vec![(col("a"), "unused".to_string())];
-
... 70207 lines suppressed ...