You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@asterixdb.apache.org by mb...@apache.org on 2022/02/02 18:55:29 UTC

[asterixdb] branch master updated (4ba9293 -> 412de58)

This is an automated email from the ASF dual-hosted git repository.

mblow pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git.


    from 4ba9293  Merge release 0.9.7 into master
     new e22d76c  [NO ISSUE][IDX] Do not store source indicators for primary key indexes
     new 9abc3a8  [NO ISSUE][STO] Skip flush on datasets with no open indexes
     new 5f52a5f  [NO ISSUE][CONF] Removed unused active partitions config
     new 2582481  Merge release 0.9.7
     new 9e1c416  [ASTERIXDB-2981][EXT] Handle Parquet specialized types
     new 11a30f1   [ASTERIXDB-3007][COMP] Fix ConsolidateWindowOperatorsRule
     new 594eb45  [NO ISSUE][OTH] Replace usage of FileUtils.listFiles
     new 4ae6e5f  Merge branch 'gerrit/stabilization-5949a1cb71'
     new 9679811  Merge branch 'gerrit/stabilization-02ea049d7a'
     new f8403e3  (Null) merge branch 'gerrit/stabilization-5e11053887'
     new e8c2042  [ASTERIXDB-3007][TEST] Fix PlanVerifierTestBase
     new 412de58  Merge branch 'gerrit/neo'

The 12 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/asterix/app/nc/NCAppRuntimeContext.java |   2 +-
 .../org/apache/asterix/app/nc/ReplicaManager.java  |  11 +-
 .../asterix/app/nc/task/UpdateNodeStatusTask.java  |   4 +-
 .../app/replication/NcLifecycleCoordinator.java    |   5 +-
 .../message/NCLifecycleTaskReportMessage.java      |   6 +
 .../message/RegistrationTasksRequestMessage.java   |  20 +-
 .../message/RegistrationTasksResponseMessage.java  |   8 +-
 .../asterix/app/translator/QueryTranslator.java    |   9 +-
 .../asterix/hyracks/bootstrap/NCApplication.java   |   4 +-
 .../external_dataset/ExternalDatasetTestUtils.java |  20 +-
 .../aws/AwsS3ExternalDatasetTest.java              |   2 +-
 .../AzureBlobStorageExternalDatasetTest.java       |   2 +-
 .../{ => parquet}/BinaryFileConverterUtil.java     |  10 +-
 .../parquet/ParquetFileExampleGeneratorUtil.java   | 210 ++++++++++++
 .../apache/asterix/test/runtime/HDFSCluster.java   |   6 +-
 .../queries/window/win_opt_02/win_opt_02_1.sqlpp}  |  29 +-
 .../results/window/win_opt_02/win_opt_02_1.plan    |  23 ++
 .../parquet-types/decimal/decimal.1.ddl.sqlpp}     |  38 ++-
 .../parquet-types/decimal/decimal.2.query.sqlpp}   |  25 +-
 .../parquet-types/decimal/decimal.3.query.sqlpp}   |  22 +-
 .../disable-json-parsing.1.ddl.sqlpp}              |  29 +-
 .../disable-json-parsing.2.query.sqlpp}            |  22 +-
 .../invalid-timezone/temporal.1.ddl.sqlpp}         |  29 +-
 .../parquet-types/temporal/temporal.1.ddl.sqlpp}   |  29 +-
 .../parquet-types/temporal/tempral.2.query.sqlpp}  |  35 +-
 .../unset-flags/unset-flags.1.ddl.sqlpp}           |  28 +-
 .../unset-flags/unset-flags.2.query.sqlpp          |  60 ++++
 .../window/win_opt_02/win_opt_02.10.query.sqlpp}   |  29 +-
 .../parquet/parquet-types/decimal/decimal.02.adm   |   1 +
 .../disable-json-parsing.02.adm                    |   1 +
 .../parquet/parquet-types/temporal/temporal.02.adm |   1 +
 .../parquet-types/unset-flags/unset-flags.02.adm   |   1 +
 .../results/window/win_opt_02/win_opt_02.10.adm    |  10 +
 .../runtimets/testsuite_external_dataset_s3.xml    |  40 +++
 .../asterix/common/config/MetadataProperties.java  |   4 +-
 .../asterix/common/config/NodeProperties.java      |   3 +-
 .../asterix/common/config/PropertiesAccessor.java  |  18 +-
 .../context/PrimaryIndexOperationTracker.java      |  12 +-
 .../asterix/common/exceptions/ErrorCode.java       |   6 +
 .../asterix/common/utils/NcLocalCounters.java      |   6 +
 .../src/main/resources/asx_errormsg/en.properties  |   6 +
 .../asterix-doc/src/site/markdown/sqlpp/parquet.md | 363 +++++++++++++++++++++
 ...ue.java => AsterixParquetRuntimeException.java} |  22 +-
 .../parquet/AsterixTypeToParquetTypeVisitor.java   | 180 ++++++++--
 .../reader/hdfs/parquet/AtomicConverter.java       | 121 -------
 .../hdfs/parquet/ParquetFileRecordReader.java      |   6 +-
 .../reader/hdfs/parquet/ParquetReadSupport.java    |  37 ++-
 .../hdfs/parquet/{ => converter}/IFieldValue.java  |   4 +-
 .../parquet/converter/ParquetConverterContext.java | 274 ++++++++++++++++
 .../nested}/AbstractComplexConverter.java          |  45 ++-
 .../{ => converter/nested}/ArrayConverter.java     |  20 +-
 .../{ => converter/nested}/ObjectConverter.java    |  21 +-
 .../{ => converter/nested}/RepeatedConverter.java  |  20 +-
 .../{ => converter/nested}/RootConverter.java      |  15 +-
 .../primitve/BinaryConverter.java}                 |  41 +--
 .../primitve/DateConverter.java}                   |  24 +-
 .../converter/primitve/DecimalConverter.java       |  83 +++++
 .../primitve/GenericPrimitiveConverter.java        |  86 +++++
 .../converter/primitve/JsonStringConverter.java    |  77 +++++
 .../{ => converter/primitve}/MissingConverter.java |   2 +-
 .../primitve/PrimitiveConverterProvider.java       | 118 +++++++
 .../parquet/converter/primitve/TimeConverter.java  |  65 ++++
 .../converter/primitve/TimestampConverter.java     |  75 +++++
 .../primitve/UUIDConverter.java}                   |  43 +--
 .../primitve/UnsignedIntegerConverter.java         |  55 ++++
 .../external/parser/jackson/ParserContext.java     |  23 +-
 .../external/util/ExternalDataConstants.java       |  38 +++
 .../asterix/external/util/ExternalDataUtils.java   |  20 +-
 .../apache/asterix/external/util/HDFSUtils.java    |  43 ++-
 ...calOperatorDeepCopyWithNewVariablesVisitor.java |   4 +-
 .../logical/visitors/OperatorDeepCopyVisitor.java  |   4 +-
 .../core/algebra/plan/PlanStructureVerifier.java   |  35 +-
 .../core/algebra/plan/PlanVerifierTestBase.java    |   4 +-
 .../rules/ConsolidateWindowOperatorsRule.java      |  20 +-
 .../java/org/apache/hyracks/util/ExitUtil.java     |   1 +
 75 files changed, 2255 insertions(+), 560 deletions(-)
 rename asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/{ => parquet}/BinaryFileConverterUtil.java (87%)
 create mode 100644 asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/ParquetFileExampleGeneratorUtil.java
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/optimizerts/queries/window/win_opt_02/win_opt_02_1.sqlpp} (64%)
 create mode 100644 asterixdb/asterix-app/src/test/resources/optimizerts/results/window/win_opt_02/win_opt_02_1.plan
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.1.ddl.sqlpp} (58%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.2.query.sqlpp} (64%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.3.query.sqlpp} (64%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.1.ddl.sqlpp} (64%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.2.query.sqlpp} (64%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/invalid-timezone/temporal.1.ddl.sqlpp} (64%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/temporal.1.ddl.sqlpp} (64%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/tempral.2.query.sqlpp} (55%)
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.1.ddl.sqlpp} (64%)
 create mode 100644 asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.2.query.sqlpp
 copy asterixdb/{asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java => asterix-app/src/test/resources/runtimets/queries_sqlpp/window/win_opt_02/win_opt_02.10.query.sqlpp} (64%)
 create mode 100644 asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/decimal/decimal.02.adm
 create mode 100644 asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.02.adm
 create mode 100644 asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/temporal/temporal.02.adm
 create mode 100644 asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.02.adm
 create mode 100644 asterixdb/asterix-app/src/test/resources/runtimets/results/window/win_opt_02/win_opt_02.10.adm
 create mode 100644 asterixdb/asterix-doc/src/site/markdown/sqlpp/parquet.md
 copy asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{IFieldValue.java => AsterixParquetRuntimeException.java} (64%)
 delete mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AtomicConverter.java
 copy asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter}/IFieldValue.java (96%)
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/ParquetConverterContext.java
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter/nested}/AbstractComplexConverter.java (73%)
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter/nested}/ArrayConverter.java (76%)
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter/nested}/ObjectConverter.java (77%)
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter/nested}/RepeatedConverter.java (67%)
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter/nested}/RootConverter.java (71%)
 copy asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{MissingConverter.java => converter/primitve/BinaryConverter.java} (60%)
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{IFieldValue.java => converter/primitve/DateConverter.java} (59%)
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DecimalConverter.java
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/GenericPrimitiveConverter.java
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/JsonStringConverter.java
 copy asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{ => converter/primitve}/MissingConverter.java (98%)
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/PrimitiveConverterProvider.java
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimeConverter.java
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimestampConverter.java
 rename asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/{MissingConverter.java => converter/primitve/UUIDConverter.java} (60%)
 create mode 100644 asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UnsignedIntegerConverter.java

[asterixdb] 01/12: [NO ISSUE][IDX] Do not store source indicators for primary key indexes

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit e22d76c5be4bc04591dc7f5d1188d60836e9d17c
Author: Ali Alsuliman <al...@gmail.com>
AuthorDate: Tue Jan 25 13:08:15 2022 -0800

    [NO ISSUE][IDX] Do not store source indicators for primary key indexes
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    Since secondary primary key indexes do not have/store SKs, there
    should be no matching source indicators.
    
    Change-Id: I6c2b3bb8115b6a079ccde28cb20c28a248b6b95b
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15004
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Ali Alsuliman <al...@gmail.com>
    Reviewed-by: Dmitry Lychagin <dm...@couchbase.com>
---
 .../java/org/apache/asterix/app/translator/QueryTranslator.java    | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 438397c..fa3fe37 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -1331,11 +1331,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 List<List<String>> keyFieldNames = new ArrayList<>(indexedElementsCount);
                 List<IAType> keyFieldTypes = new ArrayList<>(indexedElementsCount);
                 List<Integer> keyFieldSourceIndicators = new ArrayList<>(indexedElementsCount);
-                if (isSecondaryPrimary) {
-                    // BACK-COMPAT: secondary primary index has one source indicator
-                    // which is set to META_RECORD_INDICATOR
-                    keyFieldSourceIndicators.add(Index.META_RECORD_INDICATOR);
-                } else {
+                // secondary primary indexes do not have search keys (no SKs), and thus no equivalent indicators
+                if (!isSecondaryPrimary) {
                     for (int i = 0; i < indexedElementsCount; i++) {
                         CreateIndexStatement.IndexedElement indexedElement = indexedElements.get(i);
                         keyFieldNames.add(indexedElement.getProjectList().get(0).first);

[asterixdb] 09/12: Merge branch 'gerrit/stabilization-02ea049d7a'

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 967981118e78484dcdc92196f72b7783ae77840c
Merge: 69d5504 4ae6e5f
Author: Michael Blow <mi...@couchbase.com>
AuthorDate: Tue Feb 1 19:25:44 2022 -0500

    Merge branch 'gerrit/stabilization-02ea049d7a'
    
    Change-Id: I92fc9437db11351d904644bfed9b56b825d72e48

 .../PersistentLocalResourceRepository.java         | 48 ++++------------------
 .../java/org/apache/hyracks/api/util/IoUtil.java   | 37 +++++++++++++++++
 2 files changed, 46 insertions(+), 39 deletions(-)

diff --cc hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java
index 6ad53ab,825fdd6..2887721
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java
@@@ -19,13 -19,16 +19,18 @@@
  package org.apache.hyracks.api.util;
  
  import java.io.File;
+ import java.io.FileFilter;
  import java.io.FileNotFoundException;
+ import java.io.FilenameFilter;
  import java.io.IOException;
 +import java.nio.channels.FileChannel;
  import java.nio.file.Files;
  import java.nio.file.NoSuchFileException;
  import java.nio.file.Path;
 +import java.nio.file.StandardOpenOption;
+ import java.util.ArrayList;
+ import java.util.Collection;
+ import java.util.Objects;
  
  import org.apache.commons.io.FileUtils;
  import org.apache.hyracks.api.exceptions.ErrorCode;
@@@ -134,22 -137,35 +139,54 @@@ public class IoUtil 
          return files;
      }
  
+     /**
+      * Gets a collection of files matching {@code filter} by searching {@code root} directory and
+      * all of its subdirectories
+      *
+      * @param root
+      * @param filter
+      * @return a collection of matching files
+      */
+     public static Collection<File> getMatchingFiles(Path root, FilenameFilter filter) {
+         if (!Files.isDirectory(root)) {
+             throw new IllegalArgumentException("Parameter 'root' is not a directory: " + root);
+         }
+         Objects.requireNonNull(filter);
+         Collection<File> files = new ArrayList<>();
+         FileFilter dirOrMatchingFileFilter = file -> file.isDirectory() || filter.accept(file, file.getName());
+         collectDirFiles(root.toFile(), dirOrMatchingFileFilter, files);
+         return files;
+     }
+ 
+     private static void collectDirFiles(File dir, FileFilter filter, Collection<File> files) {
+         File[] matchingFiles = dir.listFiles(filter);
+         if (matchingFiles != null) {
+             for (File file : matchingFiles) {
+                 if (file.isDirectory()) {
+                     collectDirFiles(file, filter, files);
+                 } else {
+                     files.add(file);
+                 }
+             }
+         }
+     }
++
 +    public static void flushDirectory(File directory) throws IOException {
 +        flushDirectory(directory.toPath());
 +    }
 +
 +    public static void flushDirectory(Path path) throws IOException {
 +        if (!Files.isDirectory(path)) {
 +            throw new IOException("Not a directory: " + path);
 +        }
 +        if (Files.getFileStore(path).supportsFileAttributeView("posix")) {
 +            try (FileChannel ch = FileChannel.open(path, StandardOpenOption.READ)) {
 +                ch.force(true);
 +            }
 +        } else {
 +            if (LOGGER.isTraceEnabled()) {
 +                LOGGER.trace("Unable to flush directory " + path);
 +            }
 +        }
 +    }
  }

[asterixdb] 05/12: [ASTERIXDB-2981][EXT] Handle Parquet specialized types

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 9e1c416123e7a46458b91ae7d6713751c305b965
Author: Wail Alkowaileet <wa...@gmail.com>
AuthorDate: Mon Jan 31 10:42:20 2022 -0800

    [ASTERIXDB-2981][EXT] Handle Parquet specialized types
    
    - user model changes: yes
    - storage format changes: no
    - interface changes: no
    
    Details:
    - Handle parquet specialized types
    - Supported types: Date, Time, Timestamp, Binary, JSON,
      BSON (as Binary), Decimal (converted to double), UUID
    - By default, type conversion for decimal is not enabled
      - To enable decimal conversion, an option must be passed
        during dataset creation (see parquet-types.1.ddl.sqlpp)
    - UTC-adjusted timestamp values can be re-adjusted to local
      values by setting the timezone ID using the "timezone"
      option upon creating a dataset.
    - Documentation is also included in this patch
    
    Change-Id: I345809e16aeb91f0a28d6592e1b8799e48434051
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/13903
    Reviewed-by: Ali Alsuliman <al...@gmail.com>
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
---
 .../asterix/app/translator/QueryTranslator.java    |   2 +-
 .../external_dataset/ExternalDatasetTestUtils.java |  20 +-
 .../aws/AwsS3ExternalDatasetTest.java              |   2 +-
 .../AzureBlobStorageExternalDatasetTest.java       |   2 +-
 .../{ => parquet}/BinaryFileConverterUtil.java     |  10 +-
 .../parquet/ParquetFileExampleGeneratorUtil.java   | 210 ++++++++++++
 .../apache/asterix/test/runtime/HDFSCluster.java   |   6 +-
 .../parquet-types/decimal/decimal.1.ddl.sqlpp}     |  38 ++-
 .../parquet-types/decimal/decimal.2.query.sqlpp}   |  25 +-
 .../parquet-types/decimal/decimal.3.query.sqlpp}   |  22 +-
 .../disable-json-parsing.1.ddl.sqlpp}              |  29 +-
 .../disable-json-parsing.2.query.sqlpp}            |  22 +-
 .../invalid-timezone/temporal.1.ddl.sqlpp}         |  29 +-
 .../parquet-types/temporal/temporal.1.ddl.sqlpp}   |  29 +-
 .../parquet-types/temporal/tempral.2.query.sqlpp}  |  35 +-
 .../unset-flags/unset-flags.1.ddl.sqlpp}           |  28 +-
 .../unset-flags/unset-flags.2.query.sqlpp          |  60 ++++
 .../parquet/parquet-types/decimal/decimal.02.adm   |   1 +
 .../disable-json-parsing.02.adm                    |   1 +
 .../parquet/parquet-types/temporal/temporal.02.adm |   1 +
 .../parquet-types/unset-flags/unset-flags.02.adm   |   1 +
 .../runtimets/testsuite_external_dataset_s3.xml    |  40 +++
 .../asterix/common/exceptions/ErrorCode.java       |   6 +
 .../src/main/resources/asx_errormsg/en.properties  |   6 +
 .../asterix-doc/src/site/markdown/sqlpp/parquet.md | 363 +++++++++++++++++++++
 ...ue.java => AsterixParquetRuntimeException.java} |  22 +-
 .../parquet/AsterixTypeToParquetTypeVisitor.java   | 180 ++++++++--
 .../reader/hdfs/parquet/AtomicConverter.java       | 121 -------
 .../hdfs/parquet/ParquetFileRecordReader.java      |   6 +-
 .../reader/hdfs/parquet/ParquetReadSupport.java    |  37 ++-
 .../hdfs/parquet/{ => converter}/IFieldValue.java  |   4 +-
 .../parquet/converter/ParquetConverterContext.java | 274 ++++++++++++++++
 .../nested}/AbstractComplexConverter.java          |  45 ++-
 .../{ => converter/nested}/ArrayConverter.java     |  20 +-
 .../{ => converter/nested}/ObjectConverter.java    |  21 +-
 .../{ => converter/nested}/RepeatedConverter.java  |  20 +-
 .../{ => converter/nested}/RootConverter.java      |  15 +-
 .../primitve/BinaryConverter.java}                 |  41 +--
 .../primitve/DateConverter.java}                   |  24 +-
 .../converter/primitve/DecimalConverter.java       |  83 +++++
 .../primitve/GenericPrimitiveConverter.java        |  86 +++++
 .../converter/primitve/JsonStringConverter.java    |  77 +++++
 .../{ => converter/primitve}/MissingConverter.java |   2 +-
 .../primitve/PrimitiveConverterProvider.java       | 118 +++++++
 .../parquet/converter/primitve/TimeConverter.java  |  65 ++++
 .../converter/primitve/TimestampConverter.java     |  75 +++++
 .../primitve/UUIDConverter.java}                   |  43 +--
 .../primitve/UnsignedIntegerConverter.java         |  55 ++++
 .../external/parser/jackson/ParserContext.java     |  23 +-
 .../external/util/ExternalDataConstants.java       |  38 +++
 .../asterix/external/util/ExternalDataUtils.java   |  20 +-
 .../apache/asterix/external/util/HDFSUtils.java    |  43 ++-
 52 files changed, 2071 insertions(+), 475 deletions(-)

diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index fa3fe37..eb6c779 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -1003,7 +1003,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             throws AlgebricksException {
         ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) dd.getDatasetDetailsDecl();
         Map<String, String> properties = externalDetails.getProperties();
-        ExternalDataUtils.validateType(properties, (ARecordType) itemType.getDatatype());
+        ExternalDataUtils.validateParquetTypeAndConfiguration(properties, (ARecordType) itemType.getDatatype());
         return properties;
     }
 
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/ExternalDatasetTestUtils.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/ExternalDatasetTestUtils.java
index 094c1db..316d261 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/ExternalDatasetTestUtils.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/ExternalDatasetTestUtils.java
@@ -18,9 +18,9 @@
  */
 package org.apache.asterix.test.external_dataset;
 
-import static org.apache.asterix.test.external_dataset.BinaryFileConverterUtil.BINARY_GEN_BASEDIR;
 import static org.apache.asterix.test.external_dataset.aws.AwsS3ExternalDatasetTest.BOM_FILE_CONTAINER;
 import static org.apache.asterix.test.external_dataset.aws.AwsS3ExternalDatasetTest.FIXED_DATA_CONTAINER;
+import static org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil.BINARY_GEN_BASEDIR;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -30,6 +30,7 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 
+import org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil;
 import org.apache.asterix.testframework.context.TestCaseContext;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.logging.log4j.LogManager;
@@ -258,18 +259,19 @@ public class ExternalDatasetTestUtils {
     }
 
     private static void loadParquetFiles() {
-        String dataBasePath = BINARY_GEN_BASEDIR;
+        String generatedDataBasePath = BINARY_GEN_BASEDIR;
         String definition = PARQUET_DEFINITION;
 
         // Normal format
         String definitionSegment = "";
-        loadData(dataBasePath, "", "dummy_tweet.parquet", definition, definitionSegment, false, false);
-        loadData(dataBasePath, "", "id_age.parquet", definition, definitionSegment, false, false);
-        loadData(dataBasePath, "", "id_age-string.parquet", definition, definitionSegment, false, false);
-        loadData(dataBasePath, "", "id_name.parquet", definition, definitionSegment, false, false);
-        loadData(dataBasePath, "", "id_name_comment.parquet", definition, definitionSegment, false, false);
-        loadData(dataBasePath, "", "heterogeneous_1.parquet", definition, definitionSegment, false, false);
-        loadData(dataBasePath, "", "heterogeneous_2.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "dummy_tweet.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "id_age.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "id_age-string.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "id_name.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "id_name_comment.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "heterogeneous_1.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "heterogeneous_2.parquet", definition, definitionSegment, false, false);
+        loadData(generatedDataBasePath, "", "parquetTypes.parquet", definition, definitionSegment, false, false);
     }
 
     private static void loadData(String fileBasePath, String filePathSegment, String filename, String definition,
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java
index 05b0d0b..785e676 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java
@@ -18,10 +18,10 @@
  */
 package org.apache.asterix.test.external_dataset.aws;
 
-import static org.apache.asterix.test.external_dataset.BinaryFileConverterUtil.DEFAULT_PARQUET_SRC_PATH;
 import static org.apache.asterix.test.external_dataset.ExternalDatasetTestUtils.createBinaryFiles;
 import static org.apache.asterix.test.external_dataset.ExternalDatasetTestUtils.setDataPaths;
 import static org.apache.asterix.test.external_dataset.ExternalDatasetTestUtils.setUploaders;
+import static org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil.DEFAULT_PARQUET_SRC_PATH;
 import static org.apache.hyracks.util.file.FileUtil.joinPath;
 
 import java.io.ByteArrayOutputStream;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/microsoft/AzureBlobStorageExternalDatasetTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/microsoft/AzureBlobStorageExternalDatasetTest.java
index 894b4bc..7de2d7e 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/microsoft/AzureBlobStorageExternalDatasetTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/microsoft/AzureBlobStorageExternalDatasetTest.java
@@ -22,8 +22,8 @@ import static org.apache.asterix.test.common.TestConstants.Azure.AZURITE_ACCOUNT
 import static org.apache.asterix.test.common.TestConstants.Azure.AZURITE_ACCOUNT_NAME_DEFAULT;
 import static org.apache.asterix.test.common.TestConstants.Azure.BLOB_ENDPOINT_PLACEHOLDER;
 import static org.apache.asterix.test.common.TestConstants.Azure.sasToken;
-import static org.apache.asterix.test.external_dataset.BinaryFileConverterUtil.BINARY_GEN_BASEDIR;
 import static org.apache.asterix.test.external_dataset.ExternalDatasetTestUtils.PARQUET_DEFINITION;
+import static org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil.BINARY_GEN_BASEDIR;
 import static org.apache.hyracks.util.file.FileUtil.joinPath;
 
 import java.io.ByteArrayInputStream;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/BinaryFileConverterUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/BinaryFileConverterUtil.java
similarity index 87%
rename from asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/BinaryFileConverterUtil.java
rename to asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/BinaryFileConverterUtil.java
index d3865d3..96a8703 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/BinaryFileConverterUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/BinaryFileConverterUtil.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.test.external_dataset;
+package org.apache.asterix.test.external_dataset.parquet;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -64,13 +64,15 @@ public class BinaryFileConverterUtil {
             Path outputPath = new Path(destPath.getAbsolutePath(), fileName);
             writeParquetFile(jsonFile, outputPath);
         }
+        //Write parquet example that contains the specialized types
+        ParquetFileExampleGeneratorUtil.writeExample();
     }
 
     private static void writeParquetFile(File jsonInputPath, Path parquetOutputPath) throws IOException {
-        final FileInputStream schemaInputStream = new FileInputStream(jsonInputPath);
-        final FileInputStream jsonInputStream = new FileInputStream(jsonInputPath);
+        FileInputStream schemaInputStream = new FileInputStream(jsonInputPath);
+        FileInputStream jsonInputStream = new FileInputStream(jsonInputPath);
         //Infer Avro schema
-        final Schema inputSchema = JsonUtil.inferSchema(schemaInputStream, "parquet_schema", NUM_OF_RECORDS_SCHEMA);
+        Schema inputSchema = JsonUtil.inferSchema(schemaInputStream, "parquet_schema", NUM_OF_RECORDS_SCHEMA);
         try (JSONFileReader<Record> reader = new JSONFileReader<>(jsonInputStream, inputSchema, Record.class)) {
             reader.initialize();
             try (AvroParquetWriter<Record> writer = new AvroParquetWriter<>(parquetOutputPath, inputSchema)) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/ParquetFileExampleGeneratorUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/ParquetFileExampleGeneratorUtil.java
new file mode 100644
index 0000000..501fb27
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/parquet/ParquetFileExampleGeneratorUtil.java
@@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.test.external_dataset.parquet;
+
+import static org.apache.parquet.hadoop.metadata.CompressionCodecName.UNCOMPRESSED;
+import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.time.Duration;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.ZoneOffset;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.AUUIDSerializerDeserializer;
+import org.apache.asterix.om.base.AMutableUUID;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.parquet.column.ParquetProperties.WriterVersion;
+import org.apache.parquet.example.data.Group;
+import org.apache.parquet.example.data.simple.NanoTime;
+import org.apache.parquet.example.data.simple.SimpleGroupFactory;
+import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.example.ExampleParquetWriter;
+import org.apache.parquet.hadoop.example.GroupWriteSupport;
+import org.apache.parquet.hadoop.util.HadoopOutputFile;
+import org.apache.parquet.io.OutputFile;
+import org.apache.parquet.io.PositionOutputStream;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.schema.MessageType;
+
+/**
+ * A generator of a parquet file that contains different specialized type
+ * Adopted from:
+ *
+ * @see <a href="https://github.com/apache/parquet-mr/blob/master/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriter.java">TestParquetWriter</a>
+ */
+public class ParquetFileExampleGeneratorUtil {
+    //Jan 1st 2022 01:00:00 UTC
+    private static final long TIME_MILLIS = TimeUnit.SECONDS.toMillis(1640998800);
+    private static final int TIME_DAYS = (int) TimeUnit.MILLISECONDS.toDays(TIME_MILLIS);
+    private static final int SINCE_MIDNIGHT_MILLIS = getSecondsSinceMidnight();
+
+    private static final int PST_OFFSET = TimeZone.getTimeZone("PST").getRawOffset();
+    private static final long PST_TIME_MILLIS = TimeUnit.SECONDS.toMillis(1640998800) + PST_OFFSET;
+    private static final int PST_TIME_DAYS = (int) TimeUnit.MILLISECONDS.toDays(PST_TIME_MILLIS);
+    private static final int PST_SINCE_MIDNIGHT_MILLIS = SINCE_MIDNIGHT_MILLIS + PST_OFFSET;
+    private static final int JULIAN_DAY_OF_EPOCH = 2440588;
+
+    private static final String FILE_NAME = "parquetTypes.parquet";
+
+    private static final String SCHEMA = "message test { \n" + "   required boolean boolean_field;\n"
+            + "   required int32 int8_field (INTEGER(8,true));\n"
+            + "   required int32 int16_field (INTEGER(16,true));\n" + "   required int32 int32_field;\n"
+            + "   required int64 int64_field;\n" + "   required int32 uint8_field (INTEGER(8,false));\n"
+            + "   required int32 uint16_field (INTEGER(16,false));\n"
+            + "   required int32 uint32_field (INTEGER(32,false));\n"
+            + "   required int64 uint64_field (INTEGER(64,false));\n"
+            + "   required int64 overflowed_uint64_field (INTEGER(64,false));\n" + "   required float float_field;\n"
+            + "   required double double_field;\n" + "   required int32 decimal32_field (DECIMAL(5, 4));\n"
+            + "   required int64 decimal64_field (DECIMAL(12, 9));\n"
+            + "   required fixed_len_byte_array(10) decimal_fixed80_field (DECIMAL(22,21));\n"
+            + "   required binary decimal_arbitrary_length_field (DECIMAL(22,21));\n"
+            + "   required binary binary_field;\n" + "   required binary string_field (UTF8);\n"
+            + "   required binary enum_field (ENUM);\n" + "   required binary json_field (JSON);\n"
+            + "   required int32 date_field (DATE);\n" + "   required int32 time32_millis_field (TIME(MILLIS, true));\n"
+            + "   required int64 time64_micros_field (TIME(MICROS, true));\n"
+            + "   required int64 time64_nanos_field (TIME(NANOS, true));\n"
+            + "   required int32 time32_millis_pst_field (TIME(MILLIS, false));\n"
+            + "   required int64 time64_micros_pst_field (TIME(MICROS, false));\n"
+            + "   required int64 time64_nanos_pst_field (TIME(NANOS, false));\n"
+            + "   required int64 timestamp64_millis_field (TIMESTAMP(MILLIS, true));\n"
+            + "   required int64 timestamp64_micros_field (TIMESTAMP(MICROS, true));\n"
+            + "   required int64 timestamp64_nanos_field (TIMESTAMP(NANOS, true));\n"
+            + "   required int64 timestamp64_millis_pst_field (TIMESTAMP(MILLIS, false));\n"
+            + "   required int64 timestamp64_micros_pst_field (TIMESTAMP(MICROS, false));\n"
+            + "   required int64 timestamp64_nanos_pst_field (TIMESTAMP(NANOS, false));\n"
+            + "   required int96 timestamp96_field;\n" + "   required fixed_len_byte_array(16) uuid_field (UUID);"
+            + "     required group mapField (MAP) {\n" + "   repeated group key_value {\n"
+            + "     required int32 key;\n" + "     required int32 value;\n" + "   }\n" + " }" + "}";
+
+    private ParquetFileExampleGeneratorUtil() {
+    }
+
+    public static void writeExample() throws IOException {
+        Configuration conf = new Configuration();
+        Path root = new Path(BinaryFileConverterUtil.BINARY_GEN_BASEDIR);
+        MessageType schema = parseMessageType(SCHEMA);
+        GroupWriteSupport.setSchema(schema, conf);
+        Path file = new Path(root, FILE_NAME);
+        ParquetWriter<Group> writer = ExampleParquetWriter.builder(new TestOutputFile(file, conf))
+                .withCompressionCodec(UNCOMPRESSED).withRowGroupSize(1024).withPageSize(1024)
+                .withDictionaryPageSize(512).enableDictionaryEncoding().withValidation(false)
+                .withWriterVersion(WriterVersion.PARQUET_2_0).withConf(conf).build();
+        SimpleGroupFactory groupFactory = new SimpleGroupFactory(schema);
+        Group message = groupFactory.newGroup().append("boolean_field", true).append("int8_field", 8)
+                .append("int16_field", 16).append("int32_field", 32).append("int64_field", 64L)
+                .append("uint8_field", Byte.MAX_VALUE + 1).append("uint16_field", Short.MAX_VALUE + 1)
+                .append("uint32_field", Integer.MAX_VALUE + 1).append("uint64_field", 151L)
+                .append("overflowed_uint64_field", Long.MAX_VALUE + 1).append("float_field", 1.0F)
+                .append("double_field", 1.0D).append("decimal32_field", getDecimal32())
+                .append("decimal64_field", getDecimal64()).append("decimal_fixed80_field", getDecimal80())
+                .append("decimal_arbitrary_length_field", getDecimal80()).append("binary_field", createConstantBinary())
+                .append("string_field", "stringVal").append("enum_field", "enumVal").append("json_field", "[1,2,3]")
+                .append("date_field", TIME_DAYS).append("time32_millis_field", SINCE_MIDNIGHT_MILLIS)
+                .append("time64_micros_field", TimeUnit.MILLISECONDS.toMicros(SINCE_MIDNIGHT_MILLIS))
+                .append("time64_nanos_field", TimeUnit.MILLISECONDS.toNanos(SINCE_MIDNIGHT_MILLIS))
+                .append("time32_millis_pst_field", PST_SINCE_MIDNIGHT_MILLIS)
+                .append("time64_micros_pst_field", TimeUnit.MILLISECONDS.toMicros(PST_SINCE_MIDNIGHT_MILLIS))
+                .append("time64_nanos_pst_field", TimeUnit.MILLISECONDS.toNanos(PST_SINCE_MIDNIGHT_MILLIS))
+                .append("timestamp64_millis_field", TIME_MILLIS)
+                .append("timestamp64_micros_field", TimeUnit.MILLISECONDS.toMicros(TIME_MILLIS))
+                .append("timestamp64_nanos_field", TimeUnit.MILLISECONDS.toNanos(TIME_MILLIS))
+                .append("timestamp64_millis_pst_field", PST_TIME_MILLIS)
+                .append("timestamp64_micros_pst_field", TimeUnit.MILLISECONDS.toMicros(PST_TIME_MILLIS))
+                .append("timestamp64_nanos_pst_field", TimeUnit.MILLISECONDS.toNanos(PST_TIME_MILLIS))
+                .append("timestamp96_field",
+                        new NanoTime(PST_TIME_DAYS + JULIAN_DAY_OF_EPOCH,
+                                TimeUnit.MILLISECONDS.toNanos(PST_SINCE_MIDNIGHT_MILLIS)))
+                .append("uuid_field", createUUIDBinary());
+        Group mapField = message.addGroup("mapField");
+        mapField.addGroup("key_value").append("key", 1).append("value", 1);
+        writer.write(message);
+        writer.close();
+    }
+
+    private static int getSecondsSinceMidnight() {
+        Instant instant = Instant.ofEpochMilli(TIME_MILLIS);
+        Instant midnight = LocalDate.ofInstant(instant, ZoneOffset.UTC).atStartOfDay().toInstant(ZoneOffset.UTC);
+        return (int) Duration.between(midnight, instant).toMillis();
+    }
+
+    private static int getDecimal32() {
+        BigDecimal decimal = new BigDecimal("1.1000");
+        return decimal.unscaledValue().intValue();
+    }
+
+    private static long getDecimal64() {
+        BigDecimal decimal = new BigDecimal("154.000000001");
+        return decimal.unscaledValue().longValue();
+    }
+
+    private static Binary getDecimal80() {
+        BigDecimal decimal = new BigDecimal("9.223372036854775800001");
+        return Binary.fromConstantByteArray(decimal.unscaledValue().toByteArray());
+    }
+
+    private static Binary createConstantBinary() {
+        byte[] binaryBytes = { 0x00, 0x01, 0x02 };
+        return Binary.fromConstantByteArray(binaryBytes);
+    }
+
+    private static Binary createUUIDBinary() throws HyracksDataException {
+        char[] digit = "123e4567-e89b-12d3-a456-426614174000".toCharArray();
+        AMutableUUID uuid = new AMutableUUID();
+        uuid.parseUUIDString(digit, 0, digit.length);
+        ArrayBackedValueStorage storage = new ArrayBackedValueStorage();
+        AUUIDSerializerDeserializer.INSTANCE.serialize(uuid, storage.getDataOutput());
+        return Binary.fromConstantByteArray(storage.getByteArray(), 0, storage.getLength());
+    }
+
+    private static class TestOutputFile implements OutputFile {
+
+        private final OutputFile outputFile;
+
+        TestOutputFile(Path path, Configuration conf) throws IOException {
+            outputFile = HadoopOutputFile.fromPath(path, conf);
+        }
+
+        @Override
+        public PositionOutputStream create(long blockSizeHint) throws IOException {
+            return outputFile.create(blockSizeHint);
+        }
+
+        @Override
+        public PositionOutputStream createOrOverwrite(long blockSizeHint) throws IOException {
+            return outputFile.createOrOverwrite(blockSizeHint);
+        }
+
+        @Override
+        public boolean supportsBlockSize() {
+            return outputFile.supportsBlockSize();
+        }
+
+        @Override
+        public long defaultBlockSize() {
+            return outputFile.defaultBlockSize();
+        }
+    }
+}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
index 4c30d2b..19a9253 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
@@ -18,14 +18,14 @@
  */
 package org.apache.asterix.test.runtime;
 
-import static org.apache.asterix.test.external_dataset.BinaryFileConverterUtil.BINARY_GEN_BASEDIR;
-import static org.apache.asterix.test.external_dataset.BinaryFileConverterUtil.DEFAULT_PARQUET_SRC_PATH;
+import static org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil.BINARY_GEN_BASEDIR;
+import static org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil.DEFAULT_PARQUET_SRC_PATH;
 
 import java.io.File;
 import java.io.IOException;
 
 import org.apache.asterix.external.dataset.adapter.GenericAdapter;
-import org.apache.asterix.test.external_dataset.BinaryFileConverterUtil;
+import org.apache.asterix.test.external_dataset.parquet.BinaryFileConverterUtil;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.1.ddl.sqlpp
similarity index 58%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.1.ddl.sqlpp
index 946b10c..ecf866e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.1.ddl.sqlpp
@@ -16,19 +16,31 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import org.apache.hyracks.data.std.api.IValueReference;
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
+
+
+CREATE TYPE ParquetType as {
+};
+
+CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING %adapter%
+(
+  %template%,
+  ("container"="playground"),
+  ("definition"="parquet-data/reviews"),
+  ("include"="*parquetTypes.parquet"),
+  ("format" = "parquet"),
+  ("decimal-to-double" = "true")
+);
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+CREATE EXTERNAL DATASET ParquetDataset2(ParquetType) USING %adapter%
+(
+  %template%,
+  ("container"="playground"),
+  ("definition"="parquet-data/reviews"),
+  ("include"="*parquetTypes.parquet"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.2.query.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.2.query.sqlpp
index 946b10c..1bfd2df 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.2.query.sqlpp
@@ -16,19 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+/*
+* Description  : Test Parquet's decimal types
+* Expected Res : Success with a warning about precision loss
+* Date         : Jan 27th 2022
+*/
 
-import org.apache.hyracks.data.std.api.IValueReference;
+-- param max-warnings:json=1000
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+SELECT p.decimal32_field,
+       p.decimal64_field,
+       p.decimal_fixed80_field,
+       p.decimal_arbitrary_length_field
+FROM ParquetDataset p
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.3.query.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.3.query.sqlpp
index 946b10c..5cdff29 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/decimal/decimal.3.query.sqlpp
@@ -16,19 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+/*
+* Description  : Test Parquet's decimal types
+* Expected Res : Error decimal-to-double is not enabled
+* Date         : Jan 27th 2022
+*/
 
-import org.apache.hyracks.data.std.api.IValueReference;
+-- param max-warnings:json=1000
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+SELECT p.decimal32_field
+FROM ParquetDataset2 p
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.1.ddl.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.1.ddl.sqlpp
index 946b10c..03af660 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.1.ddl.sqlpp
@@ -16,19 +16,22 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import org.apache.hyracks.data.std.api.IValueReference;
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
+
+
+CREATE TYPE ParquetType as {
+};
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING %adapter%
+(
+  %template%,
+  ("container"="playground"),
+  ("definition"="parquet-data/reviews"),
+  ("include"="*parquetTypes.parquet"),
+  ("format" = "parquet"),
+  ("parse-json-string" = "false")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.2.query.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.2.query.sqlpp
index 946b10c..310c9ed 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.2.query.sqlpp
@@ -16,19 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+/*
+* Description  : Test Parquet's json type
+* Expected Res : Success
+* Date         : Jan 27th 2022
+*/
 
-import org.apache.hyracks.data.std.api.IValueReference;
+-- param max-warnings:json=1000
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+SELECT p.json_field
+FROM ParquetDataset p
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/invalid-timezone/temporal.1.ddl.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/invalid-timezone/temporal.1.ddl.sqlpp
index 946b10c..5c202c3 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/invalid-timezone/temporal.1.ddl.sqlpp
@@ -16,19 +16,22 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import org.apache.hyracks.data.std.api.IValueReference;
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
+
+
+CREATE TYPE ParquetType as {
+};
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING %adapter%
+(
+  %template%,
+  ("container"="playground"),
+  ("definition"="parquet-data/reviews"),
+  ("include"="*parquetTypes.parquet"),
+  ("format" = "parquet"),
+  ("timezone" = "invalid-timezone")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/temporal.1.ddl.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/temporal.1.ddl.sqlpp
index 946b10c..220eb91 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/temporal.1.ddl.sqlpp
@@ -16,19 +16,22 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import org.apache.hyracks.data.std.api.IValueReference;
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
+
+
+CREATE TYPE ParquetType as {
+};
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING %adapter%
+(
+  %template%,
+  ("container"="playground"),
+  ("definition"="parquet-data/reviews"),
+  ("include"="*parquetTypes.parquet"),
+  ("format" = "parquet"),
+  ("timezone" = "PST")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/tempral.2.query.sqlpp
similarity index 55%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/tempral.2.query.sqlpp
index 946b10c..3d19e92 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/temporal/tempral.2.query.sqlpp
@@ -16,19 +16,28 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+/*
+* Description  : Test Parquet's temporal types (UTC values are adjusted to PST)
+* Expected Res : Success
+* Date         : Jan 27th 2022
+*/
 
-import org.apache.hyracks.data.std.api.IValueReference;
+-- param max-warnings:json=1000
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+SELECT p.date_field,
+       p.time32_millis_field,
+       p.time64_micros_field,
+       p.time64_nanos_field,
+       p.time32_millis_pst_field,
+       p.time64_micros_pst_field,
+       p.time64_nanos_pst_field,
+       p.timestamp64_millis_field,
+       p.timestamp64_micros_field,
+       p.timestamp64_nanos_field,
+       p.timestamp64_millis_pst_field,
+       p.timestamp64_micros_pst_field,
+       p.timestamp64_nanos_pst_field,
+       p.timestamp96_field
+FROM ParquetDataset p
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.1.ddl.sqlpp
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.1.ddl.sqlpp
index 946b10c..5f5f661 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.1.ddl.sqlpp
@@ -16,19 +16,21 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import org.apache.hyracks.data.std.api.IValueReference;
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+USE test;
+
+
+CREATE TYPE ParquetType as {
+};
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
-}
+CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING %adapter%
+(
+  %template%,
+  ("container"="playground"),
+  ("definition"="parquet-data/reviews"),
+  ("include"="*parquetTypes.parquet"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.2.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.2.query.sqlpp
new file mode 100644
index 0000000..80a4cc2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.2.query.sqlpp
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Test Parquet types except for decimals
+* Expected Res : Success with warnings
+* Date         : November 1st 2021
+*/
+
+-- param max-warnings:json=1000
+
+USE test;
+
+SELECT p.boolean_field,
+       p.int8_field,
+       p.int16_field,
+       p.int32_field,
+       p.int64_field,
+       p.uint8_field,
+       p.uint16_field,
+       p.uint32_field,
+       p.uint64_field,
+       p.overflowed_uint64_field,
+       p.float_field,
+       p.double_field,
+       p.binary_field,
+       p.string_field,
+       p.enum_field,
+       p.json_field,
+       p.date_field,
+       p.time32_millis_field,
+       p.time64_micros_field,
+       p.time64_nanos_field,
+       p.time32_millis_pst_field,
+       p.time64_micros_pst_field,
+       p.time64_nanos_pst_field,
+       p.timestamp64_millis_field,
+       p.timestamp64_micros_field,
+       p.timestamp64_nanos_field,
+       p.timestamp64_millis_pst_field,
+       p.timestamp64_micros_pst_field,
+       p.timestamp64_nanos_pst_field,
+       p.timestamp96_field,
+       p.uuid_field
+FROM ParquetDataset p
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/decimal/decimal.02.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/decimal/decimal.02.adm
new file mode 100644
index 0000000..9f8b991
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/decimal/decimal.02.adm
@@ -0,0 +1 @@
+{ "decimal32_field": 1.1, "decimal64_field": 154.000000001, "decimal_fixed80_field": 9.223372036854776, "decimal_arbitrary_length_field": 9.223372036854776 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.02.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.02.adm
new file mode 100644
index 0000000..86fc647
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/disable-json-parsing/disable-json-parsing.02.adm
@@ -0,0 +1 @@
+{ "json_field": "[1,2,3]" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/temporal/temporal.02.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/temporal/temporal.02.adm
new file mode 100644
index 0000000..b1ddb13
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/temporal/temporal.02.adm
@@ -0,0 +1 @@
+{ "date_field": date("2022-01-01"), "time32_millis_field": time("01:00:00.000"), "time64_micros_field": time("01:00:00.000"), "time64_nanos_field": time("01:00:00.000"), "time32_millis_pst_field": time("17:00:00.000"), "time64_micros_pst_field": time("17:00:00.000"), "time64_nanos_pst_field": time("17:00:00.000"), "timestamp64_millis_field": datetime("2021-12-31T17:00:00.000"), "timestamp64_micros_field": datetime("2021-12-31T17:00:00.000"), "timestamp64_nanos_field": datetime("2021-12-3 [...]
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.02.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.02.adm
new file mode 100644
index 0000000..c42147b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/common/parquet/parquet-types/unset-flags/unset-flags.02.adm
@@ -0,0 +1 @@
+{ "boolean_field": true, "int8_field": 8, "int16_field": 16, "int32_field": 32, "int64_field": 64, "uint8_field": 128, "uint16_field": 32768, "uint32_field": 2147483648, "uint64_field": 151, "float_field": 1.0, "double_field": 1.0, "binary_field": hex("000102"), "string_field": "stringVal", "enum_field": "enumVal", "json_field": [ 1, 2, 3 ], "date_field": date("2022-01-01"), "time32_millis_field": time("01:00:00.000"), "time64_micros_field": time("01:00:00.000"), "time64_nanos_field": ti [...]
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
index bacc23b..a8786e2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
@@ -172,6 +172,46 @@
         <expected-warn>The provided external dataset configuration returned no files from the external source</expected-warn>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="external-dataset" check-warnings="true">
+      <compilation-unit name="common/parquet/parquet-types/unset-flags">
+        <placeholder name="adapter" value="S3" />
+        <output-dir compare="Text">common/parquet/parquet-types/unset-flags</output-dir>
+        <source-location>false</source-location>
+        <expected-warn>Parquet file(s) contain unsigned integer that is larger than the 'bigint' range</expected-warn>
+        <expected-warn>Parquet file(s) contain values of the temporal type 'datetime' that are adjusted to UTC. Recreate the external dataset and set the option 'timezone' to get the local-adjusted 'datetime' value</expected-warn>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="external-dataset" check-warnings="true">
+      <compilation-unit name="common/parquet/parquet-types/decimal">
+        <placeholder name="adapter" value="S3" />
+        <output-dir compare="Text">common/parquet/parquet-types/decimal</output-dir>
+        <source-location>false</source-location>
+        <expected-error>ASX0054: Parquet type 'required int32 decimal32_field (DECIMAL(5,4))' is not supported by default. To enable type conversion, recreate the external dataset with the option 'decimal-to-double' enabled</expected-error>
+        <expected-warn>Parquet decimal precision loss: precision '22' is greater than the maximum supported precision '20'</expected-warn>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="external-dataset" check-warnings="true">
+      <compilation-unit name="common/parquet/parquet-types/temporal">
+        <placeholder name="adapter" value="S3" />
+        <output-dir compare="Text">common/parquet/parquet-types/temporal</output-dir>
+        <source-location>false</source-location>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="external-dataset">
+      <compilation-unit name="common/parquet/parquet-types/invalid-timezone">
+        <placeholder name="adapter" value="S3" />
+        <output-dir compare="Text">common/parquet/parquet-types/invalid-timezone</output-dir>
+        <source-location>false</source-location>
+        <expected-error>Provided timezone is invalid: 'invalid-timezone'</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="external-dataset" check-warnings="true">
+      <compilation-unit name="common/parquet/parquet-types/disable-json-parsing">
+        <placeholder name="adapter" value="S3" />
+        <output-dir compare="Text">common/parquet/parquet-types/disable-json-parsing</output-dir>
+        <source-location>false</source-location>
+      </compilation-unit>
+    </test-case>
     <!-- Parquet Tests End -->
     <test-case FilePath="external-dataset">
       <compilation-unit name="common/empty-string-definition">
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
index 8939ce0..068c125 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
@@ -80,6 +80,11 @@ public enum ErrorCode implements IError {
     INVALID_PARAM(50),
     INCOMPARABLE_TYPES(51),
     ILLEGAL_STATE(52),
+    UNSUPPORTED_PARQUET_TYPE(53),
+    PARQUET_SUPPORTED_TYPE_WITH_OPTION(54),
+    PARQUET_DECIMAL_TO_DOUBLE_PRECISION_LOSS(55),
+    PARQUET_TIME_ZONE_ID_IS_NOT_SET(56),
+    PARQUET_CONTAINS_OVERFLOWED_BIGINT(57),
 
     UNSUPPORTED_JRE(100),
 
@@ -257,6 +262,7 @@ public enum ErrorCode implements IError {
     COMPILATION_SUBQUERY_COERCION_ERROR(1169),
     S3_REGION_NOT_SUPPORTED(1170),
     COMPILATION_SET_OPERATION_ERROR(1171),
+    INVALID_TIMEZONE(1172),
 
     // Feed errors
     DATAFLOW_ILLEGAL_STATE(3001),
diff --git a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
index 5120908..faaf8d5 100644
--- a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
+++ b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
@@ -87,6 +87,11 @@
 50 = Invalid parameter \"%1$s\"
 #51 is used
 52 = Illegal state. %1$s
+53 = Unsupported Parquet type '%1$s'
+54 = Parquet type '%1$s' is not supported by default. To enable type conversion, recreate the external dataset with the option '%2$s' enabled
+55 = Parquet decimal precision loss: precision '%1$s' is greater than the maximum supported precision '%2$s'
+56 = Parquet file(s) contain values of the temporal type '%1$s' that are adjusted to UTC. Recreate the external dataset and set the option '%2$s' to get the local-adjusted '%1$s' value
+57 = Parquet file(s) contain unsigned integer that is larger than the '%1$s' range
 
 100 = Unsupported JRE: %1$s
 
@@ -259,6 +264,7 @@
 1169 = Unable to do subquery coercion. %1$s
 1170 = Provided S3 region is not supported: '%1$s'
 1171 = Unable to process %1$s clause. %2$s
+1172 = Provided timezone is invalid: '%1$s'
 
 # Feed Errors
 3001 = Illegal state.
diff --git a/asterixdb/asterix-doc/src/site/markdown/sqlpp/parquet.md b/asterixdb/asterix-doc/src/site/markdown/sqlpp/parquet.md
new file mode 100644
index 0000000..c31ca50
--- /dev/null
+++ b/asterixdb/asterix-doc/src/site/markdown/sqlpp/parquet.md
@@ -0,0 +1,363 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+
+# Querying Parquet Files #
+
+## <a id="toc">Table of Contents</a> ##
+
+* [Overview](#Overview)
+* [DDL](#DDL)
+* [Query Parquet Files](#QueryParquetFiles)
+* [Type Compatibility](#TypeCompatibility)
+* [Parquet Type Flags](#ParquetTypeFlags)
+
+## <a id="Overview">Overview</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+[Apache Parquet](https://parquet.apache.org/) is a columnar file format for storing semi-structured data (like JSON).
+Apache AsterixDB supports running queries against Parquet files that are stored in Amazon S3 and Microsoft Azure Blob
+Storage as [External Datasets](../aql/externaldata.html).
+
+## <a id="DDL">DDL</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+To start, an end-user needs to create a type as follows:
+
+    -- The type should not contain any declared fields
+    CREATE TYPE ParquetType AS {
+    }
+
+Note that the created type does not have any declared fields. The reason is that Parquet files embed the schema within
+each file. Thus, no type is needed to be declared, and it is up to AsterixDB to read each file's schema. If the created
+type contains any declared type, AsterixDB will throw an error:
+
+    Type 'ParquetType' contains declared fields, which is not supported for 'parquet' format
+
+Next, the user can create an external dataset - using the declared type - as follows:
+
+### Amazon S3
+
+    CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING S3
+    (
+        -- Replace <ACCESS-KEY> with your access key
+        ("accessKeyId"="<ACCESS-KEY>"),
+
+        -- Replace <SECRET-ACCESS-KEY> with your access key
+        ("secretAccessKey" = "<SECRET-ACCESS-KEY>"),
+
+        -- S3 bucket
+        ("container"="parquetBucket"),
+
+        -- Path to the parquet files within the bucket
+        ("definition"="path/to/parquet/files"),
+
+        -- Specifying the format as parquet
+        ("format" = "parquet")
+    );
+
+### Microsoft Azure Blob Storage
+
+    CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING AZUREBLOB
+    (
+        -- Replace <ACCOUNT-NAME> with your account name
+        ("accountName"="<ACCOUNT-NAME>"),
+
+        -- Replace <ACCOUNT-KEY> with your account key
+        ("accountKey"="<ACCOUNT-KEY>"),
+
+        -- Azure Blob container
+        ("container"="parquetContainer"),
+
+        -- Path to the parquet files within the bucket
+        ("definition"="path/to/parquet/files"),
+
+        -- Specifying the format as parquet
+        ("format" = "parquet")
+    );
+
+<i><b>Additional setting/properties could be set as detailed later in [Parquet Type Flags](#ParquetTypeFlags)</b></i>
+
+## <a id="QueryParquetFiles">Query Parquet Files</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+To query the data stored in Parquet files, one can simply write a query against the created External Dataset. For
+example:
+
+    SELECT COUNT(*)
+    FROM ParquetDataset;
+
+Another example:
+
+    SELECT pd.age, COUNT(*) cnt
+    FROM ParquetDataset pd
+    GROUP BY pd.age;
+
+## <a id="TypeCompatibility">Type Compatibility</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+AsterixDB supports Parquet's generic types such as `STRING`, `INT` and `DOUBLE`. However, Parquet files could
+contain [additional types](https://github.com/apache/parquet-format/blob/master/LogicalTypes.md) such as `DATE` and
+`DATETIME`-like types. The following table shows the type mapping between Apache Parquet and AsterixDB:
+
+<table>
+    <thead>
+        <tr>
+            <th>Parquet</th>
+            <th>AsterixDB</th>
+            <th>Value Examples</th>
+            <th>Comment</th>
+        </tr>
+    </thead>
+    <tbody>
+        <tr>
+            <td><code>BOOLEAN</code></td>
+            <td><code>BOOLEAN</code></td>
+            <td><code>true</code> / <code>false</code></td>
+            <td>-</td>
+        </tr>
+        <tr>
+            <td><code>INT_8</code></td>
+            <td rowspan="8"><code>BIGINT</code></td>
+            <td rowspan="8">
+                AsterixDB <code>BIGINT</code> Range:
+                <ul>
+                    <li><b>Min</b>:-9,223,372,036,854,775,808</li>
+                    <li><b>Max</b>: 9,223,372,036,854,775,807</li>
+                </ul>
+            </td>
+            <td rowspan="7">-</td>
+        </tr>
+        <tr>
+            <td><code>INT_16</code></td>
+        </tr>
+        <tr>
+            <td><code>INT_32</code></td>
+        </tr>
+        <tr>
+            <td><code>INT_64</code></td>
+        </tr>
+        <tr>
+            <td><code>UINT_8</code></td>
+        </tr>
+        <tr>
+            <td><code>UINT_16</code></td>
+        </tr>
+        <tr>
+            <td><code>UINT_32</code></td>
+        </tr>
+        <tr>
+            <td><code>UINT_64</code></td>
+            <td>There is a possibility that a value overflows. A warning will be issued in case of an overflow and
+                <code>MISSING</code> would be returned.
+            </td>
+        </tr>
+        <tr>
+            <td><code>FLOAT</code></td>
+            <td rowspan="4"><code>DOUBLE</code></td>
+            <td rowspan="4">
+                AsterixDB <code>DOUBLE</code> Range:
+                <ul>
+                    <li><b>Min Positive Value</b>: 2^-1074</li>
+                    <li><b>Max Positive Value</b>: 2^1023</li>
+                </ul>
+            </td>
+            <td rowspan="2">-</td>
+        </tr>
+        <tr>
+            <td><code>DOUBLE</code></td>
+        </tr>
+        <tr>
+            <td><code>FIXED_LEN_BYTE_ARRAY (DECIMAL)</code></td>
+            <td rowspan="2">
+                Parquet <code>DECIMAL</code> values are converted to doubles, with the possibility of precision loss.
+                The flag <code>decimal-to-double</code> must be set upon creating the dataset.
+                <ul><li><i>See <a href ="#ParquetTypeFlags">Parquet Type Flags</a></i></li></ul>
+            </td>
+        </tr>
+        <tr>
+            <td><code>BINARY (DECIMAL)</code></td>
+        </tr>
+        <tr>
+            <td><code>BINARY (ENUM)</code></td>
+            <td><code>"Fruit"</code></td>
+            <td>Parquet Enum values are parsed as Strings</td>
+        </tr>
+        <tr>
+            <td><code>BINARY (UTF8)</code></td>
+            <td><code>STRING</code></td>
+            <td><code>"Hello World"</code></td>
+            <td>-</td>
+        </tr>
+        <tr>
+            <td><code>FIXED_LEN_BYTE_ARRAY (UUID)</code></td>
+            <td><code>UUID</code></td>
+            <td><code>uuid("123e4567-e89b-12d3-a456-426614174000")</code></td>
+            <td>-</td>
+        </tr>
+        <tr>
+            <td><code>INT_32 (DATE)</code></td>
+            <td><code>DATE</code></td>
+            <td><code>date("2021-11-01")</code></td>
+            <td>-</td>
+        </tr>
+        <tr>
+            <td><code>INT_32 (TIME)</code></td>
+            <td><code>TIME</code></td>
+            <td rowspan="2"><code>time("00:00:00.000")</code></td>
+            <td>Time in milliseconds.</td>
+        </tr>
+        <tr>
+            <td><code>INT_64 (TIME)</code></td>
+            <td><code>TIME</code></td>
+            <td>Time in micro/nano seconds.</td>
+        </tr>
+        <tr>
+            <td><code>INT_64 (TIMESTAMP)</code></td>
+            <td rowspan="2"><code>DATETIME</code></td>
+            <td rowspan="2"><code>datetime("2021-11-01T21:37:13.738")</code></td>
+            <td>Timestamp in milli/micro/nano seconds. Parquet can also store timestamp values with the option
+                <code>isAdjustedToUTC = true</code>. To get the local-adjusted <code>DATETIME</code> value, the user
+                can set the time zone ID using the option <code>timezone</code>.
+                <ul><li><i>See <a href ="#ParquetTypeFlags">Parquet Type Flags</a></i></li></ul>
+            </td>
+        </tr>
+        <tr>
+            <td><code>INT96</code></td>
+            <td>A timestamp value that separates days and time to form a timestamp. INT96 is always in local time.</td>
+        </tr>
+        <tr>
+            <td><code>BINARY (JSON)</code></td>
+            <td>any type</td>
+            <td>
+                <ul>
+                    <li><code>{"name": "John"}</code></li>
+                    <li><code>[1, 2, 3]</code></li>
+                </ul> 
+            </td>
+            <td>
+                Parse JSON string into internal AsterixDB value.
+                The flag <code>parse-json-string</code> is set by default. To get the string value (i.e., not parsed as
+                AsterixDB value), unset the flag <code>parse-json-string</code>.
+                <ul><li><i>See <a href ="#ParquetTypeFlags">Parquet Type Flags</a></i></li></ul>
+            </td>
+        </tr>
+        <tr>
+            <td><code>BINARY</code></td>
+            <td rowspan="2"><code>BINARY</code></td>
+            <td><code>hex("0101FF")</code></td>
+            <td>-</td>
+        </tr>
+        <tr>
+            <td><code>BSON</code></td>
+            <td>N/A</td>
+            <td>BSON values will be returned as <code>BINARY</code></td>
+        </tr>
+        <tr>
+            <td><code>LIST</code></td>
+            <td><code>ARRAY</code></td>
+            <td><code>[1, 2, 3]</code></td>
+            <td>Parquet's <code>LIST</code> type is converted into <code>ARRAY</code></td>
+        </tr>
+        <tr>
+            <td><code>MAP</code></td>
+            <td><code>ARRAY</code> of <code>OBJECT</code></td>
+            <td><code>[{"key":1, "value":1}, {"key":2, "value":2}]</code></td>
+            <td>Parquet's <code>MAP</code> types are converted into an <code>ARRAY</code> of <code>OBJECT</code>. Each 
+                <code>OBJECT</code> value consists of two fields: <code>key</code> and <code>value</code>
+            </td>
+        </tr>
+        <tr>
+            <td><code>FIXED_LEN_BYTE_ARRAY (INTERVAL)</code></td>
+            <td>-</td>
+            <td>N/A</td>
+            <td><code>INTERVAL</code> is not supported. A warning will be issued and <code>MISSING</code> value
+                will be returned.
+            </td>
+        </tr>
+    </tbody>
+</table>
+
+## <a id="ParquetTypeFlags">Parquet Type Flags</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+The table in [Type Compatibility](#TypeCompatibility) shows the type mapping between Parquet and AsterixDB. Some of the
+Parquet types are not parsed by default as those types are not natively supported in AsterixDB. However, the user can set
+a flag to convert some of those types into a supported AsterixDB type.
+
+##### DECIMAL TYPE
+
+The user can enable parsing `DECIMAL` Parquet values by enabling a certain flag as in the following example:
+
+    CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING S3
+    (
+        -- Credentials and path to Parquet files
+        ...
+
+        -- Enable converting decimal values to double
+        ("decimal-to-double" = "true")
+    );
+
+This flag will enable parsing/converting `DECIMAL` values/types into `DOUBLE`. For example, if the flag
+`decimal-to-double` is not set and a Parquet file contains a `DECIMAL` value, the following error will be thrown when
+running a query that requests a `DECIMAL` value:
+
+    Parquet type 'optional fixed_len_byte_array(16) decimalType (DECIMAL(38,18))' is not supported by default. To enable type conversion, recreate the external dataset with the option 'decimal-to-double' enabled
+
+and the returned value will be `MISSING`. If the flag `decimal-to-double` is set, the converted `DOUBLE` value will be
+returned.
+
+##### TEMPORAL TYPES
+
+For the temporal types (namely `DATETIME`), their values could be stored in Parquet with the option
+`isAdjustedToUTC = true`. Hence, the user has to provide the timezone ID to adjust their values to the local value by
+setting the flag `timezone`. To do so, a user can set the timezone ID to "<b>PST</b>" upon creating a dataset as in the
+following example:
+
+    CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING S3
+    (
+        -- Credentials and path to Parquet files
+        ...
+
+        -- Converting UTC time to PST time
+        ("timezone" = "PST")
+    );
+
+If the flag `timezone` is not set, a warning will appear when running a query:
+
+    Parquet file(s) contain values of the temporal type 'datetime' that are adjusted to UTC. Recreate the external dataset and set the option 'timezone' to get the local-adjusted 'datetime' value
+
+and the UTC `DATETIME` will be returned.
+
+##### JSON TYPE
+
+By default, we parse the JSON values into AsterixDB values, where a user can process those values using `SQL++` queries.
+However, one could disable the parsing of JSON string values (which are stored as `STRING`) by unsetting the flag
+`parse-json-string` as in the following example:
+
+    CREATE EXTERNAL DATASET ParquetDataset(ParquetType) USING S3
+    (
+        -- Credentials and path to Parquet files
+        ...
+
+        -- Stop parsing JSON string values
+        ("parse-json-string" = "false")
+    );
+
+And the returned value will be of type `STRING`.
+
+##### INTERVAL TYPE
+
+Currently, AsterixDB does not support Parquet's `INTERVAL` type. When a query requests (or projects) an `INTERVAL` value,
+a warning will be issued and `MISSING` value will be returned instead.
+    
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixParquetRuntimeException.java
similarity index 64%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixParquetRuntimeException.java
index 946b10c..0ee342a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixParquetRuntimeException.java
@@ -18,17 +18,17 @@
  */
 package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+public class AsterixParquetRuntimeException extends RuntimeException {
+    private static final long serialVersionUID = 6896076874677689992L;
+    private final HyracksDataException hyracksDataException;
+
+    public AsterixParquetRuntimeException(HyracksDataException e) {
+        this.hyracksDataException = e;
+    }
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
+    public HyracksDataException getHyracksDataException() {
+        return hyracksDataException;
+    }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixTypeToParquetTypeVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixTypeToParquetTypeVisitor.java
index c0a47d5..7258359 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixTypeToParquetTypeVisitor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AsterixTypeToParquetTypeVisitor.java
@@ -18,10 +18,16 @@
  */
 package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
-import java.util.ArrayList;
-import java.util.List;
+import static org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve.PrimitiveConverterProvider.MISSING;
+
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve.DecimalConverter;
+import org.apache.asterix.external.util.ExternalDataConstants.ParquetOptions;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.AUnionType;
@@ -30,11 +36,19 @@ import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.types.IATypeVisitor;
 import org.apache.asterix.runtime.projection.DataProjectionInfo;
 import org.apache.asterix.runtime.projection.FunctionCallInformation;
+import org.apache.hyracks.api.exceptions.SourceLocation;
 import org.apache.hyracks.api.exceptions.Warning;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.DateLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.IntLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.TimeLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.UUIDLogicalTypeAnnotation;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
 import org.apache.parquet.schema.Type;
 import org.apache.parquet.schema.Types;
 
@@ -43,14 +57,12 @@ import org.apache.parquet.schema.Types;
  */
 public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type> {
     public static final MessageType EMPTY_PARQUET_MESSAGE = Types.buildMessage().named("EMPTY");
-    public static final PrimitiveType MISSING =
-            Types.optional(PrimitiveType.PrimitiveTypeName.BOOLEAN).named("MISSING");
 
-    private final List<Warning> warnings;
+    private final ParquetConverterContext context;
     private Map<String, FunctionCallInformation> funcInfo;
 
-    public AsterixTypeToParquetTypeVisitor() {
-        warnings = new ArrayList<>();
+    public AsterixTypeToParquetTypeVisitor(ParquetConverterContext context) {
+        this.context = context;
     }
 
     public MessageType clipType(ARecordType rootType, MessageType fileSchema,
@@ -66,10 +78,6 @@ public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type
         return builder.named(fileSchema.getName());
     }
 
-    public List<Warning> getWarnings() {
-        return warnings;
-    }
-
     @Override
     public Type visit(ARecordType recordType, Type arg) {
         //No LogicalTypeAnnotation for Object types
@@ -86,8 +94,7 @@ public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type
 
     /**
      * There are two ways for representing arrays using ProtoBuf schema see the example in
-     * {@link org.apache.asterix.external.input.record.reader.hdfs.parquet.AbstractComplexConverter} for more
-     * information.
+     * {@link AbstractComplexConverter} for more information.
      */
     @Override
     public Type visit(AbstractCollectionType collectionType, Type arg) {
@@ -99,7 +106,7 @@ public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type
         Type childType = arrayType.getType(0);
         if ("array".equals(childType.getName()) || childType.asGroupType().getFieldCount() > 1) {
             //Handle Avro-like schema
-            return handleHandleAvroArray(collectionType, arrayType);
+            return handleAvroArray(collectionType, arrayType);
         }
         //Handling spark-like schema
         Types.ListBuilder<GroupType> builder = Types.list(arg.getRepetition());
@@ -128,7 +135,7 @@ public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type
         return numberOfAddedFields;
     }
 
-    private Type handleHandleAvroArray(AbstractCollectionType collectionType, GroupType groupType) {
+    private Type handleAvroArray(AbstractCollectionType collectionType, GroupType groupType) {
         Types.GroupBuilder<GroupType> builder =
                 Types.buildGroup(groupType.getRepetition()).as(groupType.getLogicalTypeAnnotation());
         //There is only one child
@@ -157,47 +164,63 @@ public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type
         if (type == MISSING) {
             return true;
         }
-        ATypeTag actualType = mapType(type);
+        //typeName is unique
+        FunctionCallInformation info = funcInfo.get(node.getTypeName());
+        ATypeTag actualType = mapType(type, context, info.getSourceLocation());
         ATypeTag expectedType = node.getTypeTag();
 
         boolean isNotExpected = actualType != expectedType;
         if (isNotExpected) {
-            //typeName is unique
-            FunctionCallInformation info = funcInfo.get(node.getTypeName());
             //If no warning is created, then it means it has been reported
-            Warning warning = info.createTypeMismatchWarning(expectedType, actualType);
+            Warning warning = null;
+            if (actualType != ATypeTag.SYSTEM_NULL) {
+                warning = info.createTypeMismatchWarning(expectedType, actualType);
+            }
             if (warning != null) {
                 //New warning that we saw for the first time. We should report it.
-                warnings.add(warning);
+                context.getWarnings().add(warning);
             }
         }
         return isNotExpected;
     }
 
-    private static ATypeTag mapType(Type parquetType) {
+    /* ****************************************
+     * Type checking methods
+     * ****************************************
+     */
+
+    public static ATypeTag mapType(Type parquetType, ParquetConverterContext context, SourceLocation sourceLocation) {
         LogicalTypeAnnotation typeAnnotation = parquetType.getLogicalTypeAnnotation();
-        if (!parquetType.isPrimitive() && typeAnnotation == null) {
-            return ATypeTag.OBJECT;
-        } else if (typeAnnotation == LogicalTypeAnnotation.listType()) {
-            return ATypeTag.ARRAY;
-        } else if (typeAnnotation == LogicalTypeAnnotation.stringType()) {
-            return ATypeTag.STRING;
+        if (!parquetType.isPrimitive()) {
+            if (typeAnnotation == null) {
+                return ATypeTag.OBJECT;
+            } else if (typeAnnotation == LogicalTypeAnnotation.listType()
+                    || typeAnnotation == LogicalTypeAnnotation.mapType()) {
+                return ATypeTag.ARRAY;
+            }
         } else {
             //Check other primitive types
-            PrimitiveType.PrimitiveTypeName primitiveTypeName = parquetType.asPrimitiveType().getPrimitiveTypeName();
-            switch (primitiveTypeName) {
+            PrimitiveType primitiveType = parquetType.asPrimitiveType();
+            switch (primitiveType.getPrimitiveTypeName()) {
                 case BOOLEAN:
                     return ATypeTag.BOOLEAN;
-                case INT32:
-                case INT64:
-                    return ATypeTag.BIGINT;
                 case FLOAT:
                 case DOUBLE:
                     return ATypeTag.DOUBLE;
-                default:
-                    throw new IllegalStateException("Unsupported type " + parquetType);
+                case INT32:
+                case INT64:
+                    return handleInt32Int64(primitiveType, context, sourceLocation);
+                case INT96:
+                    return ATypeTag.DATETIME;
+                case BINARY:
+                case FIXED_LEN_BYTE_ARRAY:
+                    return handleBinary(primitiveType, context, sourceLocation);
             }
         }
+
+        warnUnsupportedType(context, sourceLocation, parquetType);
+        //Use SYSTEM_NULL for unsupported types
+        return ATypeTag.SYSTEM_NULL;
     }
 
     private static Type getType(GroupType groupType, String fieldName) {
@@ -206,4 +229,93 @@ public class AsterixTypeToParquetTypeVisitor implements IATypeVisitor<Type, Type
         }
         return MISSING;
     }
+
+    private static ATypeTag handleInt32Int64(PrimitiveType type, ParquetConverterContext context,
+            SourceLocation sourceLocation) {
+        LogicalTypeAnnotation logicalType = type.getLogicalTypeAnnotation();
+        ATypeTag inferredTypeTag = ATypeTag.SYSTEM_NULL;
+        if (logicalType == null || logicalType instanceof IntLogicalTypeAnnotation) {
+            inferredTypeTag = ATypeTag.BIGINT;
+        } else if (logicalType instanceof DateLogicalTypeAnnotation) {
+            inferredTypeTag = ATypeTag.DATE;
+        } else if (logicalType instanceof TimeLogicalTypeAnnotation) {
+            inferredTypeTag = ATypeTag.TIME;
+        } else if (logicalType instanceof TimestampLogicalTypeAnnotation
+                && checkDatetime(type, context, sourceLocation)) {
+            TimestampLogicalTypeAnnotation tsType = (TimestampLogicalTypeAnnotation) logicalType;
+            warnIfUTCAdjustedAndZoneIdIsNotSet(context, sourceLocation, tsType.isAdjustedToUTC());
+            inferredTypeTag = ATypeTag.DATETIME;
+        } else if (logicalType instanceof DecimalLogicalTypeAnnotation) {
+            ensureDecimalToDoubleEnabled(type, context, sourceLocation);
+            inferredTypeTag = ATypeTag.DOUBLE;
+        }
+
+        //Unsupported type
+        return inferredTypeTag;
+    }
+
+    private static ATypeTag handleBinary(PrimitiveType type, ParquetConverterContext context,
+            SourceLocation sourceLocation) {
+        LogicalTypeAnnotation logicalType = type.getLogicalTypeAnnotation();
+        ATypeTag inferredTypeTag = ATypeTag.SYSTEM_NULL;
+        if (logicalType == null || logicalType == LogicalTypeAnnotation.bsonType()) {
+            inferredTypeTag = ATypeTag.BINARY;
+        } else if (logicalType == LogicalTypeAnnotation.stringType()
+                || logicalType == LogicalTypeAnnotation.enumType()) {
+            inferredTypeTag = ATypeTag.STRING;
+        } else if (logicalType == LogicalTypeAnnotation.jsonType()) {
+            //Parsing JSON could be of any type. if parseJson is disabled, return as String
+            inferredTypeTag = context.isParseJsonEnabled() ? ATypeTag.ANY : ATypeTag.STRING;
+        } else if (logicalType instanceof DecimalLogicalTypeAnnotation) {
+            ensureDecimalToDoubleEnabled(type, context, sourceLocation);
+            inferredTypeTag = ATypeTag.DOUBLE;
+        } else if (logicalType instanceof UUIDLogicalTypeAnnotation) {
+            inferredTypeTag = ATypeTag.UUID;
+        }
+
+        //Unsupported type
+        return inferredTypeTag;
+    }
+
+    private static boolean checkDatetime(PrimitiveType type, ParquetConverterContext context,
+            SourceLocation sourceLocation) {
+        if (type.getPrimitiveTypeName() == PrimitiveTypeName.INT32) {
+            //Only INT64 and INT96 are supported per parquet specification
+            warnUnsupportedType(context, sourceLocation, type);
+            return false;
+        }
+        return true;
+    }
+
+    private static void ensureDecimalToDoubleEnabled(PrimitiveType type, ParquetConverterContext context,
+            SourceLocation sourceLocation) {
+        if (!context.isDecimalToDoubleEnabled()) {
+            throw new AsterixParquetRuntimeException(
+                    new RuntimeDataException(ErrorCode.PARQUET_SUPPORTED_TYPE_WITH_OPTION, sourceLocation,
+                            type.toString(), ParquetOptions.DECIMAL_TO_DOUBLE));
+        }
+
+        DecimalLogicalTypeAnnotation decimalLogicalType =
+                (DecimalLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
+        int precision = decimalLogicalType.getPrecision();
+        if (precision > DecimalConverter.LONG_MAX_PRECISION) {
+            context.getWarnings().add(Warning.of(null, ErrorCode.PARQUET_DECIMAL_TO_DOUBLE_PRECISION_LOSS, precision,
+                    DecimalConverter.LONG_MAX_PRECISION));
+        }
+    }
+
+    public static void warnUnsupportedType(ParquetConverterContext context, SourceLocation sourceLocation,
+            Type parquetType) {
+        context.getWarnings()
+                .add(Warning.of(sourceLocation, ErrorCode.UNSUPPORTED_PARQUET_TYPE, parquetType.toString()));
+    }
+
+    private static void warnIfUTCAdjustedAndZoneIdIsNotSet(ParquetConverterContext context,
+            SourceLocation sourceLocation, boolean adjustedToUTC) {
+        if (adjustedToUTC && context.getTimeZoneId().isEmpty()) {
+            Warning warning = Warning.of(sourceLocation, ErrorCode.PARQUET_TIME_ZONE_ID_IS_NOT_SET, ATypeTag.DATETIME,
+                    ParquetOptions.TIMEZONE);
+            context.getWarnings().add(warning);
+        }
+    }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AtomicConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AtomicConverter.java
deleted file mode 100644
index 7219bdd..0000000
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AtomicConverter.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.asterix.external.input.stream.StandardUTF8ToModifiedUTF8DataOutput;
-import org.apache.asterix.external.parser.jackson.ParserContext;
-import org.apache.asterix.om.types.ATypeTag;
-import org.apache.hyracks.data.std.api.IValueReference;
-import org.apache.parquet.io.api.Binary;
-import org.apache.parquet.io.api.PrimitiveConverter;
-
-/**
- * Currently, only JSON types are supported (string, number, boolean)
- */
-class AtomicConverter extends PrimitiveConverter implements IFieldValue {
-    private final AbstractComplexConverter parent;
-    private final IValueReference fieldName;
-    private final int index;
-    private final ParserContext context;
-
-    public AtomicConverter(AbstractComplexConverter parent, int index, ParserContext context) {
-        this(parent, null, index, context);
-    }
-
-    public AtomicConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
-            ParserContext context) {
-        this.parent = parent;
-        this.fieldName = fieldName;
-        this.index = index;
-        this.context = context;
-    }
-
-    @Override
-    public void addBinary(Binary value) {
-        final DataOutput out = parent.getDataOutput();
-        final StandardUTF8ToModifiedUTF8DataOutput stringOut = context.getModifiedUTF8DataOutput();
-        stringOut.setDataOutput(out);
-        try {
-            out.writeByte(ATypeTag.STRING.serialize());
-            value.writeTo(stringOut);
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
-        }
-        parent.addValue(this);
-    }
-
-    @Override
-    public void addBoolean(boolean value) {
-        final DataOutput out = parent.getDataOutput();
-        try {
-            out.writeByte(ATypeTag.BOOLEAN.serialize());
-            out.writeBoolean(value);
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
-        }
-        parent.addValue(this);
-    }
-
-    @Override
-    public void addFloat(float value) {
-        addDouble(value);
-    }
-
-    @Override
-    public void addDouble(double value) {
-        final DataOutput out = parent.getDataOutput();
-        try {
-            out.writeByte(ATypeTag.DOUBLE.serialize());
-            out.writeDouble(value);
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
-        }
-        parent.addValue(this);
-    }
-
-    @Override
-    public void addInt(int value) {
-        addLong(value);
-    }
-
-    @Override
-    public void addLong(long value) {
-        final DataOutput out = parent.getDataOutput();
-        try {
-            out.writeByte(ATypeTag.BIGINT.serialize());
-            out.writeLong(value);
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
-        }
-        parent.addValue(this);
-    }
-
-    @Override
-    public IValueReference getFieldName() {
-        return fieldName;
-    }
-
-    @Override
-    public int getIndex() {
-        return index;
-    }
-}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java
index cc9b34c..9c1d70a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java
@@ -58,7 +58,11 @@ public class ParquetFileRecordReader<V extends IValueReference> extends Abstract
     @SuppressWarnings("unchecked")
     @Override
     protected RecordReader<Void, V> getRecordReader(int splitIndex) throws IOException {
-        reader = (RecordReader<Void, V>) inputFormat.getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
+        try {
+            reader = (RecordReader<Void, V>) inputFormat.getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
+        } catch (AsterixParquetRuntimeException e) {
+            throw e.getHyracksDataException();
+        }
         if (value == null) {
             value = reader.createValue();
         }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetReadSupport.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetReadSupport.java
index aac293d..797a2b2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetReadSupport.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetReadSupport.java
@@ -19,10 +19,12 @@
 package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
 import java.io.IOException;
-import java.util.Collections;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.RootConverter;
 import org.apache.asterix.external.util.HDFSUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.runtime.projection.FunctionCallInformation;
@@ -39,25 +41,27 @@ public class ParquetReadSupport extends ReadSupport<IValueReference> {
     @Override
     public ReadContext init(InitContext context) {
         MessageType requestedSchema = getRequestedSchema(context);
-        return new ReadContext(requestedSchema, Collections.emptyMap());
+        return new ReadContext(requestedSchema);
     }
 
     @Override
     public RecordMaterializer<IValueReference> prepareForRead(Configuration configuration,
             Map<String, String> keyValueMetaData, MessageType fileSchema, ReadContext readContext) {
-        return new ADMRecordMaterializer(readContext);
+        return new ADMRecordMaterializer(configuration, readContext);
     }
 
-    private static MessageType getRequestedSchema(InitContext context) {
-        Configuration configuration = context.getConfiguration();
-        MessageType fileSchema = context.getFileSchema();
-        AsterixTypeToParquetTypeVisitor visitor = new AsterixTypeToParquetTypeVisitor();
+    private static MessageType getRequestedSchema(InitContext initContext) {
+        Configuration configuration = initContext.getConfiguration();
+        MessageType fileSchema = initContext.getFileSchema();
+
+        List<Warning> warnings = new ArrayList<>();
+        ParquetConverterContext context = new ParquetConverterContext(configuration, warnings);
+        AsterixTypeToParquetTypeVisitor visitor = new AsterixTypeToParquetTypeVisitor(context);
         try {
             ARecordType expectedType = HDFSUtils.getExpectedType(configuration);
             Map<String, FunctionCallInformation> functionCallInformationMap =
                     HDFSUtils.getFunctionCallInformationMap(configuration);
             MessageType requestedType = visitor.clipType(expectedType, fileSchema, functionCallInformationMap);
-            List<Warning> warnings = visitor.getWarnings();
 
             if (!warnings.isEmpty()) {
                 //New warnings were created, set the warnings in hadoop configuration to be reported
@@ -73,13 +77,26 @@ public class ParquetReadSupport extends ReadSupport<IValueReference> {
 
     private static class ADMRecordMaterializer extends RecordMaterializer<IValueReference> {
         private final RootConverter rootConverter;
+        private final List<Warning> warnings;
+        private final Configuration configuration;
 
-        public ADMRecordMaterializer(ReadContext readContext) {
-            rootConverter = new RootConverter(readContext.getRequestedSchema());
+        public ADMRecordMaterializer(Configuration configuration, ReadContext readContext) {
+            warnings = new ArrayList<>();
+            rootConverter = new RootConverter(readContext.getRequestedSchema(), configuration, warnings);
+            this.configuration = configuration;
         }
 
         @Override
         public IValueReference getCurrentRecord() {
+            try {
+                if (!warnings.isEmpty()) {
+                    //Issue all pending warnings
+                    HDFSUtils.setWarnings(warnings, configuration);
+                    warnings.clear();
+                }
+            } catch (IOException e) {
+                throw new IllegalStateException(e);
+            }
             return rootConverter.getRecord();
         }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/IFieldValue.java
similarity index 96%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
copy to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/IFieldValue.java
index 946b10c..c0ee37b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/IFieldValue.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter;
 
 import org.apache.hyracks.data.std.api.IValueReference;
 
@@ -24,7 +24,7 @@ import org.apache.hyracks.data.std.api.IValueReference;
  * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
  * index of associated with a value.
  */
-interface IFieldValue {
+public interface IFieldValue {
     IValueReference getFieldName();
 
     /**
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/ParquetConverterContext.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/ParquetConverterContext.java
new file mode 100644
index 0000000..4982ca5
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/ParquetConverterContext.java
@@ -0,0 +1,274 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+import java.util.TimeZone;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.ABinarySerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
+import org.apache.asterix.external.input.stream.StandardUTF8ToModifiedUTF8DataOutput;
+import org.apache.asterix.external.parser.jackson.ParserContext;
+import org.apache.asterix.external.util.ExternalDataConstants.ParquetOptions;
+import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.om.base.ABinary;
+import org.apache.asterix.om.base.ABoolean;
+import org.apache.asterix.om.base.ADate;
+import org.apache.asterix.om.base.ADateTime;
+import org.apache.asterix.om.base.ADouble;
+import org.apache.asterix.om.base.AInt64;
+import org.apache.asterix.om.base.AMutableDate;
+import org.apache.asterix.om.base.AMutableDateTime;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.AMutableTime;
+import org.apache.asterix.om.base.ATime;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.Warning;
+import org.apache.hyracks.util.encoding.VarLenIntEncoderDecoder;
+import org.apache.hyracks.util.string.UTF8StringReader;
+import org.apache.hyracks.util.string.UTF8StringWriter;
+import org.apache.parquet.io.api.Binary;
+
+public class ParquetConverterContext extends ParserContext {
+    /*
+     * ************************************************************************
+     * Serializers/Deserializers
+     * ************************************************************************
+     */
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ABoolean> booleanSerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<AInt64> int64SerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ADouble> doubleSerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ABinary> binarySerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABINARY);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ADate> dateSerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ATime> timeSerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ADateTime> datetimeSerDer =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
+
+    //Issued warnings
+    private final List<Warning> warnings;
+
+    /*
+     * ************************************************************************
+     * Binary values members
+     * ************************************************************************
+     */
+    private final StandardUTF8ToModifiedUTF8DataOutput modifiedUTF8DataOutput;
+    private byte[] lengthBytes;
+
+    /*
+     * ************************************************************************
+     * Mutable Values
+     * ************************************************************************
+     */
+
+    private final AMutableInt64 mutableInt64 = new AMutableInt64(0);
+    private final AMutableDouble mutableDouble = new AMutableDouble(0.0);
+    private final AMutableDate mutableDate = new AMutableDate(0);
+    private final AMutableTime mutableTime = new AMutableTime(0);
+    private final AMutableDateTime mutableDateTime = new AMutableDateTime(0);
+
+    /*
+     * ************************************************************************
+     * Type knobs
+     * ************************************************************************
+     */
+    private final boolean parseJson;
+    private final boolean decimalToDouble;
+
+    /*
+     * ************************************************************************
+     * Temporal Configuration
+     * ************************************************************************
+     */
+    private final String timeZoneId;
+    private final int timeZoneOffset;
+
+    public ParquetConverterContext(Configuration configuration, List<Warning> warnings) {
+        this.warnings = warnings;
+        modifiedUTF8DataOutput = new StandardUTF8ToModifiedUTF8DataOutput(
+                new AStringSerializerDeserializer(new UTF8StringWriter(), new UTF8StringReader()));
+
+        parseJson = configuration.getBoolean(ParquetOptions.HADOOP_PARSE_JSON_STRING, false);
+        decimalToDouble = configuration.getBoolean(ParquetOptions.HADOOP_DECIMAL_TO_DOUBLE, false);
+
+        String configuredTimeZoneId = configuration.get(ParquetOptions.HADOOP_TIMEZONE);
+        if (!configuredTimeZoneId.isEmpty()) {
+            timeZoneId = configuredTimeZoneId;
+            timeZoneOffset = TimeZone.getTimeZone(timeZoneId).getRawOffset();
+        } else {
+            timeZoneId = "";
+            timeZoneOffset = 0;
+        }
+    }
+
+    public List<Warning> getWarnings() {
+        return warnings;
+    }
+
+    public boolean isParseJsonEnabled() {
+        return parseJson;
+    }
+
+    public boolean isDecimalToDoubleEnabled() {
+        return decimalToDouble;
+    }
+
+    public String getTimeZoneId() {
+        return timeZoneId;
+    }
+
+    public int getTimeZoneOffset() {
+        return timeZoneOffset;
+    }
+
+    /*
+     * ************************************************************************
+     * Serialization methods
+     * All methods throws IllegalStateException as Parquet's converters methods
+     * do not throw any exceptions
+     * ************************************************************************
+     */
+
+    public void serializeBoolean(boolean value, DataOutput output) {
+        try {
+            booleanSerDer.serialize(value ? ABoolean.TRUE : ABoolean.FALSE, output);
+        } catch (HyracksDataException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public void serializeInt64(long value, DataOutput output) {
+        try {
+            mutableInt64.setValue(value);
+            int64SerDer.serialize(mutableInt64, output);
+        } catch (HyracksDataException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public void serializeDouble(double value, DataOutput output) {
+        try {
+            mutableDouble.setValue(value);
+            doubleSerDer.serialize(mutableDouble, output);
+        } catch (HyracksDataException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /**
+     * String here is a binary UTF-8 String (not Java string) and not a modified-UTF8
+     *
+     * @param value  Parquet binary value
+     * @param output output to write the converted string
+     */
+    public void serializeString(Binary value, DataOutput output) {
+        //Set the destination to where to write the final modified UTF-8
+        modifiedUTF8DataOutput.setDataOutput(output);
+        try {
+            //Write the type tag
+            output.writeByte(ATypeTag.STRING.serialize());
+            //Write the binary UTF-8 string as
+            value.writeTo(modifiedUTF8DataOutput);
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public void serializeUUID(Binary value, DataOutput output) {
+        try {
+            output.writeByte(ATypeTag.UUID.serialize());
+            value.writeTo(output);
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    /**
+     * To avoid object creation when writing a binary value, we do not use {@link ABinarySerializerDeserializer}
+     * as calls to {@link Binary#getBytes()} could create new buffer each time we call this method
+     *
+     * @param value  Parquet binary value
+     * @param output output to write the binary value
+     */
+    public void serializeBinary(Binary value, DataOutput output) {
+        try {
+            output.writeByte(ATypeTag.BINARY.serialize());
+            writeLength(value.length(), output);
+            value.writeTo(output);
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public void serializeDate(int value, DataOutput output) {
+        try {
+            mutableDate.setValue(value);
+            dateSerDer.serialize(mutableDate, output);
+        } catch (HyracksDataException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public void serializeTime(int value, DataOutput output) {
+        try {
+            mutableTime.setValue(value);
+            timeSerDer.serialize(mutableTime, output);
+        } catch (HyracksDataException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public void serializeDateTime(long timestamp, DataOutput output) {
+        try {
+            mutableDateTime.setValue(timestamp);
+            datetimeSerDer.serialize(mutableDateTime, output);
+        } catch (HyracksDataException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    private void writeLength(int length, DataOutput out) throws IOException {
+        int requiredLength = VarLenIntEncoderDecoder.getBytesRequired(length);
+        if (lengthBytes == null || requiredLength > lengthBytes.length) {
+            lengthBytes = new byte[requiredLength];
+        }
+        VarLenIntEncoderDecoder.encode(length, lengthBytes, 0);
+        out.write(lengthBytes, 0, requiredLength);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AbstractComplexConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/AbstractComplexConverter.java
similarity index 73%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AbstractComplexConverter.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/AbstractComplexConverter.java
index 363d2d2..e6b80d0 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/AbstractComplexConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/AbstractComplexConverter.java
@@ -16,15 +16,17 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested;
 
 import java.io.DataOutput;
 
-import org.apache.asterix.external.parser.jackson.ParserContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.IFieldValue;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
 import org.apache.hyracks.data.std.api.IMutableValueStorage;
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.parquet.io.api.Converter;
 import org.apache.parquet.io.api.GroupConverter;
+import org.apache.parquet.io.api.PrimitiveConverter;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.LogicalTypeAnnotation;
 import org.apache.parquet.schema.Type;
@@ -35,15 +37,16 @@ public abstract class AbstractComplexConverter extends GroupConverter implements
     private final IValueReference fieldName;
     private final int index;
     private final Converter[] converters;
-    protected final ParserContext context;
+    protected final ParquetConverterContext context;
     protected IMutableValueStorage tempStorage;
 
-    AbstractComplexConverter(AbstractComplexConverter parent, int index, GroupType parquetType, ParserContext context) {
+    AbstractComplexConverter(AbstractComplexConverter parent, int index, GroupType parquetType,
+            ParquetConverterContext context) {
         this(parent, null, index, parquetType, context);
     }
 
     AbstractComplexConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
-            GroupType parquetType, ParserContext context) {
+            GroupType parquetType, ParquetConverterContext context) {
         this.parent = parent;
         this.fieldName = fieldName;
         this.index = index;
@@ -51,14 +54,14 @@ public abstract class AbstractComplexConverter extends GroupConverter implements
         converters = new Converter[parquetType.getFieldCount()];
         for (int i = 0; i < parquetType.getFieldCount(); i++) {
             final Type type = parquetType.getType(i);
-            if (type == AsterixTypeToParquetTypeVisitor.MISSING) {
-                converters[i] = MissingConverter.INSTANCE;
-            } else if (type.isPrimitive()) {
+            if (type.isPrimitive()) {
                 converters[i] = createAtomicConverter(parquetType, i);
             } else if (LogicalTypeAnnotation.listType().equals(type.getLogicalTypeAnnotation())) {
                 converters[i] = createArrayConverter(parquetType, i);
             } else if (type.getRepetition() == Repetition.REPEATED) {
                 converters[i] = createRepeatedConverter(parquetType, i);
+            } else if (type.getLogicalTypeAnnotation() == LogicalTypeAnnotation.mapType()) {
+                converters[i] = createArrayConverter(parquetType, i);
             } else {
                 converters[i] = createObjectConverter(parquetType, i);
             }
@@ -70,13 +73,13 @@ public abstract class AbstractComplexConverter extends GroupConverter implements
      *
      * @param value Child value
      */
-    protected abstract void addValue(IFieldValue value);
+    public abstract void addValue(IFieldValue value);
 
-    protected abstract AtomicConverter createAtomicConverter(GroupType type, int index);
+    protected abstract PrimitiveConverter createAtomicConverter(GroupType type, int index);
 
-    protected abstract ArrayConverter createArrayConverter(GroupType type, int index);
+    protected abstract AbstractComplexConverter createArrayConverter(GroupType type, int index);
 
-    protected abstract ObjectConverter createObjectConverter(GroupType type, int index);
+    protected abstract AbstractComplexConverter createObjectConverter(GroupType type, int index);
 
     /**
      * Parquet file created by (old) Avro writer treat repeated values differently from files created by Spark.
@@ -104,12 +107,22 @@ public abstract class AbstractComplexConverter extends GroupConverter implements
      *    }
      * }
      *
+     * Map type:
+     * required group mapField (MAP) {
+     *    repeated group key_value {
+     *       required int32 key;
+     *       required int32 value;
+     *    }
+     * }
+     *
      * @formatter:on
      */
-    private AbstractComplexConverter createRepeatedConverter(GroupType type, int index) {
+    protected AbstractComplexConverter createRepeatedConverter(GroupType type, int index) {
         GroupType repeatedType = type.getType(index).asGroupType();
-        //The name "array" is used by Avro to represent group element (array of objects)
-        if (repeatedType.getFieldCount() > 1 || "array".equals(repeatedType.getName())) {
+        String name = repeatedType.getName();
+        if (repeatedType.getFieldCount() > 1 || "array".equals(name) || "key_value".equals(name)) {
+            //The name "array" and "key_value" are reserved names to represent array of objects
+            //"key_value" are for MAP type
             return new ObjectConverter(this, index, repeatedType, context);
         }
         return new RepeatedConverter(this, index, repeatedType, context);
@@ -130,7 +143,7 @@ public abstract class AbstractComplexConverter extends GroupConverter implements
         return converters[fieldIndex];
     }
 
-    protected DataOutput getDataOutput() {
+    public DataOutput getDataOutput() {
         tempStorage.reset();
         return tempStorage.getDataOutput();
     }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ArrayConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/ArrayConverter.java
similarity index 76%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ArrayConverter.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/ArrayConverter.java
index 9e8da77..7eacc87 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ArrayConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/ArrayConverter.java
@@ -16,26 +16,31 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested;
 
 import java.io.IOException;
 
 import org.apache.asterix.builders.IAsterixListBuilder;
-import org.apache.asterix.external.parser.jackson.ParserContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.IFieldValue;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve.PrimitiveConverterProvider;
 import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.PrimitiveConverter;
 import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.PrimitiveType;
 
 class ArrayConverter extends AbstractComplexConverter {
     private IAsterixListBuilder builder;
 
-    public ArrayConverter(AbstractComplexConverter parent, int index, GroupType parquetType, ParserContext context) {
+    public ArrayConverter(AbstractComplexConverter parent, int index, GroupType parquetType,
+            ParquetConverterContext context) {
         super(parent, index, parquetType, context);
     }
 
     public ArrayConverter(AbstractComplexConverter parent, IValueReference fieldName, int index, GroupType parquetType,
-            ParserContext context) {
+            ParquetConverterContext context) {
         super(parent, fieldName, index, parquetType, context);
     }
 
@@ -59,7 +64,7 @@ class ArrayConverter extends AbstractComplexConverter {
     }
 
     @Override
-    protected void addValue(IFieldValue value) {
+    public void addValue(IFieldValue value) {
         try {
             builder.addItem(tempStorage);
         } catch (HyracksDataException e) {
@@ -68,8 +73,9 @@ class ArrayConverter extends AbstractComplexConverter {
     }
 
     @Override
-    protected AtomicConverter createAtomicConverter(GroupType type, int index) {
-        return new AtomicConverter(this, index, context);
+    protected PrimitiveConverter createAtomicConverter(GroupType type, int index) {
+        PrimitiveType primitiveType = type.getType(index).asPrimitiveType();
+        return PrimitiveConverterProvider.createPrimitiveConverter(primitiveType, this, index, context);
     }
 
     @Override
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ObjectConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/ObjectConverter.java
similarity index 77%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ObjectConverter.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/ObjectConverter.java
index 8736e7d..3c8bfcc 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ObjectConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/ObjectConverter.java
@@ -16,26 +16,31 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested;
 
 import java.io.IOException;
 
 import org.apache.asterix.builders.IARecordBuilder;
-import org.apache.asterix.external.parser.jackson.ParserContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.IFieldValue;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve.PrimitiveConverterProvider;
 import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.PrimitiveConverter;
 import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.PrimitiveType;
 
 class ObjectConverter extends AbstractComplexConverter {
     private IARecordBuilder builder;
 
-    public ObjectConverter(AbstractComplexConverter parent, int index, GroupType parquetType, ParserContext context) {
+    public ObjectConverter(AbstractComplexConverter parent, int index, GroupType parquetType,
+            ParquetConverterContext context) {
         super(parent, index, parquetType, context);
     }
 
     public ObjectConverter(AbstractComplexConverter parent, IValueReference fieldName, int index, GroupType parquetType,
-            ParserContext context) {
+            ParquetConverterContext context) {
         super(parent, fieldName, index, parquetType, context);
     }
 
@@ -59,7 +64,7 @@ class ObjectConverter extends AbstractComplexConverter {
     }
 
     @Override
-    protected void addValue(IFieldValue value) {
+    public void addValue(IFieldValue value) {
         try {
             builder.addField(value.getFieldName(), getValue());
         } catch (HyracksDataException e) {
@@ -68,9 +73,11 @@ class ObjectConverter extends AbstractComplexConverter {
     }
 
     @Override
-    protected AtomicConverter createAtomicConverter(GroupType type, int index) {
+    protected PrimitiveConverter createAtomicConverter(GroupType type, int index) {
         try {
-            return new AtomicConverter(this, context.getSerializedFieldName(type.getFieldName(index)), index, context);
+            PrimitiveType primitiveType = type.getType(index).asPrimitiveType();
+            IValueReference fieldName = context.getSerializedFieldName(type.getFieldName(index));
+            return PrimitiveConverterProvider.createPrimitiveConverter(primitiveType, this, fieldName, index, context);
         } catch (IOException e) {
             throw new IllegalStateException(e);
         }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/RepeatedConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/RepeatedConverter.java
similarity index 67%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/RepeatedConverter.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/RepeatedConverter.java
index d46d84c..09a104b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/RepeatedConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/RepeatedConverter.java
@@ -16,15 +16,20 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested;
 
 import java.io.DataOutput;
 
-import org.apache.asterix.external.parser.jackson.ParserContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.IFieldValue;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve.PrimitiveConverterProvider;
+import org.apache.parquet.io.api.PrimitiveConverter;
 import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.PrimitiveType;
 
 class RepeatedConverter extends AbstractComplexConverter {
-    public RepeatedConverter(AbstractComplexConverter parent, int index, GroupType parquetType, ParserContext context) {
+    public RepeatedConverter(AbstractComplexConverter parent, int index, GroupType parquetType,
+            ParquetConverterContext context) {
         super(parent, index, parquetType, context);
     }
 
@@ -39,13 +44,14 @@ class RepeatedConverter extends AbstractComplexConverter {
     }
 
     @Override
-    protected void addValue(IFieldValue value) {
+    public void addValue(IFieldValue value) {
         parent.addValue(value);
     }
 
     @Override
-    protected AtomicConverter createAtomicConverter(GroupType type, int index) {
-        return new AtomicConverter(this, index, context);
+    protected PrimitiveConverter createAtomicConverter(GroupType type, int index) {
+        PrimitiveType primitiveType = type.getType(index).asPrimitiveType();
+        return PrimitiveConverterProvider.createPrimitiveConverter(primitiveType, this, index, context);
     }
 
     @Override
@@ -60,7 +66,7 @@ class RepeatedConverter extends AbstractComplexConverter {
     }
 
     @Override
-    protected DataOutput getDataOutput() {
+    public DataOutput getDataOutput() {
         return getParentDataOutput();
     }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/RootConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/RootConverter.java
similarity index 71%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/RootConverter.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/RootConverter.java
index 76f4342..24a531a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/RootConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/nested/RootConverter.java
@@ -16,20 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested;
 
 import java.io.DataOutput;
+import java.util.List;
 
-import org.apache.asterix.external.parser.jackson.ParserContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hyracks.api.exceptions.Warning;
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.parquet.schema.GroupType;
 
-class RootConverter extends ObjectConverter {
+public class RootConverter extends ObjectConverter {
     private final ArrayBackedValueStorage rootBuffer;
 
-    public RootConverter(GroupType parquetType) {
-        super(null, -1, parquetType, new ParserContext(true));
+    public RootConverter(GroupType parquetType, Configuration configuration, List<Warning> warnings) {
+        super(null, -1, parquetType, new ParquetConverterContext(configuration, warnings));
         this.rootBuffer = new ArrayBackedValueStorage();
     }
 
@@ -39,7 +42,7 @@ class RootConverter extends ObjectConverter {
         return rootBuffer.getDataOutput();
     }
 
-    protected IValueReference getRecord() {
+    public IValueReference getRecord() {
         return rootBuffer;
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/BinaryConverter.java
similarity index 60%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java
copy to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/BinaryConverter.java
index e38056b..15c1d2e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/BinaryConverter.java
@@ -16,45 +16,24 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
 
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.parquet.io.api.Binary;
-import org.apache.parquet.io.api.PrimitiveConverter;
 
-class MissingConverter extends PrimitiveConverter {
-    protected static final MissingConverter INSTANCE = new MissingConverter();
+public class BinaryConverter extends GenericPrimitiveConverter {
 
-    private MissingConverter() {
+    BinaryConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context) {
+        super(parent, fieldName, index, context);
     }
 
     @Override
     public void addBinary(Binary value) {
-        //NoOp
-    }
-
-    @Override
-    public void addBoolean(boolean value) {
-        //NoOp
-    }
-
-    @Override
-    public void addFloat(float value) {
-        //NoOp
-    }
-
-    @Override
-    public void addDouble(double value) {
-        //NoOp
-    }
-
-    @Override
-    public void addInt(int value) {
-        //NoOp
-    }
-
-    @Override
-    public void addLong(long value) {
-        //NoOp
+        context.serializeBinary(value, parent.getDataOutput());
+        parent.addValue(this);
     }
 
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DateConverter.java
similarity index 59%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DateConverter.java
index 946b10c..c8737cd 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/IFieldValue.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DateConverter.java
@@ -16,19 +16,21 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
 
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
 import org.apache.hyracks.data.std.api.IValueReference;
 
-/**
- * This interface is intended to extend {@link org.apache.parquet.io.api.Converter} to get the field name or the
- * index of associated with a value.
- */
-interface IFieldValue {
-    IValueReference getFieldName();
+class DateConverter extends GenericPrimitiveConverter {
+    DateConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context) {
+        super(parent, fieldName, index, context);
+    }
 
-    /**
-     * @return the index of the value as appeared in the schema
-     */
-    int getIndex();
+    @Override
+    public void addInt(int value) {
+        context.serializeDate(value, parent.getDataOutput());
+        parent.addValue(this);
+    }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DecimalConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DecimalConverter.java
new file mode 100644
index 0000000..e93bcf7
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/DecimalConverter.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.Binary;
+
+/**
+ * The decimal converter relies on java {@link BigDecimal} to convert decimal values. The converter could pressure
+ * the GC as we need to create {@link BigDecimal} object / value
+ */
+public class DecimalConverter extends GenericPrimitiveConverter {
+    public static final int LONG_MAX_PRECISION = 20;
+    private final int precision;
+    private final int scale;
+
+    DecimalConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context, int precision, int scale) {
+        super(parent, fieldName, index, context);
+        this.precision = precision;
+        this.scale = scale;
+    }
+
+    @Override
+    public void addInt(int value) {
+        addLong(value);
+    }
+
+    @Override
+    public void addLong(long value) {
+        addConvertedDouble(BigDecimal.valueOf(value, scale).doubleValue());
+    }
+
+    @Override
+    public void addBinary(Binary value) {
+        if (precision <= LONG_MAX_PRECISION) {
+            addLong(getUnscaledLong(value.toByteBuffer()));
+        } else {
+            //Unlimited precision
+            addConvertedDouble(new BigDecimal(new BigInteger(value.getBytes()), scale).doubleValue());
+        }
+    }
+
+    private void addConvertedDouble(double value) {
+        context.serializeDouble(value, parent.getDataOutput());
+        parent.addValue(this);
+    }
+
+    private static long getUnscaledLong(ByteBuffer buffer) {
+        byte[] bytes = buffer.array();
+        int start = buffer.arrayOffset() + buffer.position();
+        int end = buffer.arrayOffset() + buffer.limit();
+
+        long value = 0L;
+        for (int i = start; i < end; i++) {
+            value = (value << 8) | (bytes[i] & 0xFF);
+        }
+        int bits = 8 * (end - start);
+        return (value << (64 - bits)) >> (64 - bits);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/GenericPrimitiveConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/GenericPrimitiveConverter.java
new file mode 100644
index 0000000..e0b0392
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/GenericPrimitiveConverter.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.IFieldValue;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.PrimitiveConverter;
+
+public class GenericPrimitiveConverter extends PrimitiveConverter implements IFieldValue {
+
+    protected final AbstractComplexConverter parent;
+    protected final IValueReference fieldName;
+    protected final int index;
+    protected final ParquetConverterContext context;
+
+    GenericPrimitiveConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context) {
+        this.parent = parent;
+        this.fieldName = fieldName;
+        this.index = index;
+        this.context = context;
+    }
+
+    @Override
+    public final IValueReference getFieldName() {
+        return fieldName;
+    }
+
+    @Override
+    public final int getIndex() {
+        return index;
+    }
+
+    @Override
+    public void addBinary(Binary value) {
+        context.serializeString(value, parent.getDataOutput());
+        parent.addValue(this);
+    }
+
+    @Override
+    public void addBoolean(boolean value) {
+        context.serializeBoolean(value, parent.getDataOutput());
+        parent.addValue(this);
+    }
+
+    @Override
+    public void addFloat(float value) {
+        addDouble(value);
+    }
+
+    @Override
+    public void addDouble(double value) {
+        context.serializeDouble(value, parent.getDataOutput());
+        parent.addValue(this);
+    }
+
+    @Override
+    public void addInt(int value) {
+        addLong(value);
+    }
+
+    @Override
+    public void addLong(long value) {
+        context.serializeInt64(value, parent.getDataOutput());
+        parent.addValue(this);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/JsonStringConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/JsonStringConverter.java
new file mode 100644
index 0000000..258a10a
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/JsonStringConverter.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.asterix.external.parser.JSONDataParser;
+import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.data.std.util.ByteArrayAccessibleInputStream;
+import org.apache.parquet.io.api.Binary;
+
+import com.fasterxml.jackson.core.JsonFactory;
+
+class JsonStringConverter extends GenericPrimitiveConverter {
+    private static final byte[] EMPTY = new byte[0];
+    private final JSONDataParser parser;
+    private final ByteArrayAccessibleInputStream in;
+
+    JsonStringConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context) {
+        super(parent, fieldName, index, context);
+        parser = new JSONDataParser(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE, new JsonFactory());
+        in = new ByteArrayAccessibleInputStream(EMPTY, 0, 0);
+        try {
+            parser.setInputStream(in);
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    @Override
+    public void addBinary(Binary value) {
+        byte[] bytes = value.getBytes();
+        in.setContent(bytes, 0, value.length());
+
+        DataOutput out = parent.getDataOutput();
+        try {
+            if (parser.parseAnyValue(out)) {
+                parent.addValue(this);
+            } else {
+                resetParser();
+            }
+        } catch (HyracksDataException e) {
+            resetParser();
+        }
+    }
+
+    private void resetParser() {
+        try {
+            parser.reset(in);
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/MissingConverter.java
similarity index 98%
copy from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java
copy to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/MissingConverter.java
index e38056b..996731d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/MissingConverter.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
 
 import org.apache.parquet.io.api.Binary;
 import org.apache.parquet.io.api.PrimitiveConverter;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/PrimitiveConverterProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/PrimitiveConverterProvider.java
new file mode 100644
index 0000000..38c441a
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/PrimitiveConverterProvider.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.AsterixTypeToParquetTypeVisitor;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.PrimitiveConverter;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.IntLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.TimeLogicalTypeAnnotation;
+import org.apache.parquet.schema.LogicalTypeAnnotation.TimeUnit;
+import org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Types;
+
+public class PrimitiveConverterProvider {
+    public static final PrimitiveType MISSING =
+            Types.optional(PrimitiveType.PrimitiveTypeName.BOOLEAN).named("MISSING");
+
+    private PrimitiveConverterProvider() {
+    }
+
+    public static PrimitiveConverter createPrimitiveConverter(PrimitiveType type, AbstractComplexConverter parent,
+            int index, ParquetConverterContext context) {
+        return createPrimitiveConverter(type, parent, null, index, context);
+    }
+
+    public static PrimitiveConverter createPrimitiveConverter(PrimitiveType type, AbstractComplexConverter parent,
+            IValueReference fieldName, int index, ParquetConverterContext context) {
+
+        if (type == MISSING) {
+            return MissingConverter.INSTANCE;
+        }
+
+        ATypeTag mappedType = AsterixTypeToParquetTypeVisitor.mapType(type, context, null);
+        switch (mappedType) {
+            case BOOLEAN:
+            case STRING:
+                return new GenericPrimitiveConverter(parent, fieldName, index, context);
+            case BIGINT:
+                return getIntConverter(type, parent, fieldName, index, context);
+            case DOUBLE:
+                return getDoubleConverter(type, parent, fieldName, index, context);
+            case BINARY:
+                return new BinaryConverter(parent, fieldName, index, context);
+            case UUID:
+                return new UUIDConverter(parent, fieldName, index, context);
+            case DATE:
+                return new DateConverter(parent, fieldName, index, context);
+            case TIME:
+                return getTimeConverter(type, parent, fieldName, index, context);
+            case DATETIME:
+                return getTimeStampConverter(type, parent, fieldName, index, context);
+            case ANY:
+                return new JsonStringConverter(parent, fieldName, index, context);
+            default:
+                return MissingConverter.INSTANCE;
+        }
+    }
+
+    private static PrimitiveConverter getIntConverter(PrimitiveType type, AbstractComplexConverter parent,
+            IValueReference fieldName, int index, ParquetConverterContext context) {
+        IntLogicalTypeAnnotation intType = (IntLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
+        if (intType != null && !intType.isSigned()) {
+            return new UnsignedIntegerConverter(parent, fieldName, index, context);
+        }
+        return new GenericPrimitiveConverter(parent, fieldName, index, context);
+    }
+
+    private static PrimitiveConverter getDoubleConverter(PrimitiveType type, AbstractComplexConverter parent,
+            IValueReference fieldName, int index, ParquetConverterContext context) {
+        LogicalTypeAnnotation logicalType = type.getLogicalTypeAnnotation();
+        if (logicalType instanceof DecimalLogicalTypeAnnotation) {
+            DecimalLogicalTypeAnnotation decimalLogicalType = (DecimalLogicalTypeAnnotation) logicalType;
+            return new DecimalConverter(parent, fieldName, index, context, decimalLogicalType.getPrecision(),
+                    decimalLogicalType.getScale());
+
+        }
+        return new GenericPrimitiveConverter(parent, fieldName, index, context);
+    }
+
+    private static PrimitiveConverter getTimeConverter(PrimitiveType type, AbstractComplexConverter parent,
+            IValueReference fieldName, int index, ParquetConverterContext context) {
+        TimeLogicalTypeAnnotation timeLogicalType = (TimeLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
+        return new TimeConverter(parent, fieldName, index, context, timeLogicalType.getUnit());
+    }
+
+    private static PrimitiveConverter getTimeStampConverter(PrimitiveType type, AbstractComplexConverter parent,
+            IValueReference fieldName, int index, ParquetConverterContext context) {
+        TimestampLogicalTypeAnnotation tsType = (TimestampLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
+        if (tsType != null) {
+            int offset = tsType.isAdjustedToUTC() ? context.getTimeZoneOffset() : 0;
+            return new TimestampConverter(parent, fieldName, index, context, tsType.getUnit(), offset);
+        }
+        //INT96: the converter will convert the value to millis
+        return new TimestampConverter(parent, fieldName, index, context, TimeUnit.MILLIS, 0);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimeConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimeConverter.java
new file mode 100644
index 0000000..fa9f36c
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimeConverter.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+
+public class TimeConverter extends GenericPrimitiveConverter {
+    private final LogicalTypeAnnotation.TimeUnit timeUnit;
+
+    TimeConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context, LogicalTypeAnnotation.TimeUnit timeUnit) {
+        super(parent, fieldName, index, context);
+        this.timeUnit = timeUnit;
+    }
+
+    @Override
+    public void addInt(int value) {
+        addLong(value);
+    }
+
+    @Override
+    public void addLong(long value) {
+        int convertedTime = (int) getConvertedTime(timeUnit, value);
+        context.serializeTime(convertedTime, parent.getDataOutput());
+        parent.addValue(this);
+    }
+
+    public static long getConvertedTime(LogicalTypeAnnotation.TimeUnit timeUnit, long value) {
+        final long convertedTime;
+        switch (timeUnit) {
+            case MICROS:
+                convertedTime = TimeUnit.MICROSECONDS.toMillis(value);
+                break;
+            case NANOS:
+                convertedTime = TimeUnit.NANOSECONDS.toMillis(value);
+                break;
+            default:
+                //Millis
+                convertedTime = value;
+        }
+
+        return convertedTime;
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimestampConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimestampConverter.java
new file mode 100644
index 0000000..136febe
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/TimestampConverter.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+
+class TimestampConverter extends GenericPrimitiveConverter {
+    private static final long JULIAN_DAY_OF_EPOCH = 2440588;
+    private static final long MILLIS_PER_DAY = 86400000L;
+    private static final long NANOS_PER_MILLIS = 1000000L;
+
+    private final LogicalTypeAnnotation.TimeUnit timeUnit;
+    private final int timeZoneOffset;
+
+    TimestampConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context, LogicalTypeAnnotation.TimeUnit timeUnit, int timeZoneOffset) {
+        super(parent, fieldName, index, context);
+        this.timeUnit = timeUnit;
+        this.timeZoneOffset = timeZoneOffset;
+    }
+
+    /**
+     * Timestamp is an INT96 (Little Endian)
+     * INT96 timestamps are not adjusted to UTC and are always interpreted as local timestamps
+     *
+     * @param value binary representation of INT96
+     */
+    @Override
+    public void addBinary(Binary value) {
+        ByteBuffer buffer = value.toByteBuffer().order(ByteOrder.LITTLE_ENDIAN);
+        long timeOfDayNanos = buffer.getLong();
+        int julianDay = buffer.getInt();
+        long timestamp = fromJulian(julianDay, timeOfDayNanos);
+        addLong(timestamp);
+    }
+
+    /**
+     * Timestamp is an INT64
+     *
+     * @param value long value
+     */
+    @Override
+    public void addLong(long value) {
+        long convertedTime = TimeConverter.getConvertedTime(timeUnit, value);
+        context.serializeDateTime(convertedTime + timeZoneOffset, parent.getDataOutput());
+        parent.addValue(this);
+    }
+
+    private static long fromJulian(int days, long nanos) {
+        return (days - JULIAN_DAY_OF_EPOCH) * MILLIS_PER_DAY + nanos / NANOS_PER_MILLIS;
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UUIDConverter.java
similarity index 60%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java
rename to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UUIDConverter.java
index e38056b..ec07c60 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/MissingConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UUIDConverter.java
@@ -16,45 +16,22 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.input.record.reader.hdfs.parquet;
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
 
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.parquet.io.api.Binary;
-import org.apache.parquet.io.api.PrimitiveConverter;
 
-class MissingConverter extends PrimitiveConverter {
-    protected static final MissingConverter INSTANCE = new MissingConverter();
-
-    private MissingConverter() {
+public class UUIDConverter extends GenericPrimitiveConverter {
+    UUIDConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context) {
+        super(parent, fieldName, index, context);
     }
 
     @Override
     public void addBinary(Binary value) {
-        //NoOp
+        context.serializeUUID(value, parent.getDataOutput());
+        parent.addValue(this);
     }
-
-    @Override
-    public void addBoolean(boolean value) {
-        //NoOp
-    }
-
-    @Override
-    public void addFloat(float value) {
-        //NoOp
-    }
-
-    @Override
-    public void addDouble(double value) {
-        //NoOp
-    }
-
-    @Override
-    public void addInt(int value) {
-        //NoOp
-    }
-
-    @Override
-    public void addLong(long value) {
-        //NoOp
-    }
-
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UnsignedIntegerConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UnsignedIntegerConverter.java
new file mode 100644
index 0000000..763b8c8
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/converter/primitve/UnsignedIntegerConverter.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.primitve;
+
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.ParquetConverterContext;
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.converter.nested.AbstractComplexConverter;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.hyracks.api.exceptions.Warning;
+import org.apache.hyracks.data.std.api.IValueReference;
+
+public class UnsignedIntegerConverter extends GenericPrimitiveConverter {
+    private boolean overflowed;
+
+    UnsignedIntegerConverter(AbstractComplexConverter parent, IValueReference fieldName, int index,
+            ParquetConverterContext context) {
+        super(parent, fieldName, index, context);
+        overflowed = false;
+    }
+
+    @Override
+    public void addInt(int value) {
+        addLong(value & 0x00000000ffffffffL);
+    }
+
+    @Override
+    public void addLong(long value) {
+        if (value < 0) {
+            if (!overflowed) {
+                Warning warning = Warning.of(null, ErrorCode.PARQUET_CONTAINS_OVERFLOWED_BIGINT, ATypeTag.BIGINT);
+                context.getWarnings().add(warning);
+                //Ensure this warning is issued only once
+                overflowed = true;
+            }
+            return;
+        }
+        super.addLong(value);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/jackson/ParserContext.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/jackson/ParserContext.java
index ef9ff08..1a4bbee 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/jackson/ParserContext.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/jackson/ParserContext.java
@@ -27,8 +27,6 @@ import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.IAsterixListBuilder;
 import org.apache.asterix.builders.ListBuilderFactory;
 import org.apache.asterix.builders.RecordBuilderFactory;
-import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
-import org.apache.asterix.external.input.stream.StandardUTF8ToModifiedUTF8DataOutput;
 import org.apache.asterix.external.parser.AbstractNestedDataParser;
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.om.base.AMutableString;
@@ -41,8 +39,6 @@ import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IMutableValueStorage;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
-import org.apache.hyracks.util.string.UTF8StringReader;
-import org.apache.hyracks.util.string.UTF8StringWriter;
 
 /**
  * A state class that helps parsers of class {@link AbstractNestedDataParser} to maintain
@@ -67,15 +63,8 @@ public class ParserContext {
     private final ISerializerDeserializer<AString> stringSerDe;
     private final AMutableString aString;
 
-    //For parquet
-    private final StandardUTF8ToModifiedUTF8DataOutput modifiedUTF8DataOutput;
-
-    public ParserContext() {
-        this(false);
-    }
-
     @SuppressWarnings("unchecked")
-    public ParserContext(boolean allocateModfiedUTF8Writer) {
+    public ParserContext() {
         objectBuilderPool = new SoftObjectPool<>(new RecordBuilderFactory());
         arrayBuilderPool = new ObjectPool<>(new ListBuilderFactory(), ATypeTag.ARRAY);
         tempBufferPool = new SoftObjectPool<>(new AbvsBuilderFactory());
@@ -83,11 +72,6 @@ public class ParserContext {
         serializedFieldNames = new LRUMap<>(SERIALIZED_FIELDNAME_MAP_MAX_SIZE);
         stringSerDe = SerializerDeserializerProvider.INSTANCE.getAStringSerializerDeserializer();
         aString = new AMutableString("");
-        modifiedUTF8DataOutput =
-                allocateModfiedUTF8Writer
-                        ? new StandardUTF8ToModifiedUTF8DataOutput(
-                                new AStringSerializerDeserializer(new UTF8StringWriter(), new UTF8StringReader()))
-                        : null;
     }
 
     public IMutableValueStorage enterObject() {
@@ -155,9 +139,4 @@ public class ParserContext {
         tempBufferPool.recycle(tempBuffer);
         arrayBuilderPool.recycle(builder);
     }
-
-    public StandardUTF8ToModifiedUTF8DataOutput getModifiedUTF8DataOutput() {
-        return modifiedUTF8DataOutput;
-    }
-
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index 89d1132..f0b9c90 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -21,9 +21,11 @@ package org.apache.asterix.external.util;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
+import java.util.TimeZone;
 import java.util.function.LongSupplier;
 import java.util.function.Supplier;
 
+import org.apache.asterix.om.types.ATypeTag;
 import org.apache.hyracks.util.StorageUtil;
 
 public class ExternalDataConstants {
@@ -296,6 +298,42 @@ public class ExternalDataConstants {
     public static final String DEFINITION_FIELD_NAME = "definition";
     public static final String CONTAINER_NAME_FIELD_NAME = "container";
 
+    public static class ParquetOptions {
+        private ParquetOptions() {
+        }
+
+        //Prefix for hadoop configurations
+        private static final String ASTERIX_HADOOP_PREFIX = "org.apache.asterix.";
+
+        /**
+         * Parse Parquet's JSON-as-string values into ADM
+         * Default: false
+         */
+        public static final String PARSE_JSON_STRING = "parse-json-string";
+        public static final String HADOOP_PARSE_JSON_STRING = ASTERIX_HADOOP_PREFIX + PARSE_JSON_STRING;
+
+        /**
+         * Convert (rebase) Parquet's decimal values and parse them as {@link ATypeTag#DOUBLE}
+         * Default: false
+         */
+        public static final String DECIMAL_TO_DOUBLE = "decimal-to-double";
+        public static final String HADOOP_DECIMAL_TO_DOUBLE = ASTERIX_HADOOP_PREFIX + DECIMAL_TO_DOUBLE;
+
+        /**
+         * Time zone ID used to convert UTC-adjusted time and timestamp values ({@link ATypeTag#TIME} and {@link ATypeTag#DATETIME})
+         * Default: ""
+         * Note: if a UTC-adjusted time and/or timestamp exists in the Parquet file and no time zone ID is provided,
+         * the UTC value is returned and a warning is issued about that.
+         */
+        public static final String TIMEZONE = "timezone";
+        public static final String HADOOP_TIMEZONE = ASTERIX_HADOOP_PREFIX + TIMEZONE;
+
+        /**
+         * Valid time zones that are supported by Java
+         */
+        public static final Set<String> VALID_TIME_ZONES = Set.of(TimeZone.getAvailableIDs());
+    }
+
     public static class AwsS3 {
         private AwsS3() {
             throw new AssertionError("do not instantiate");
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index b38f21d..22040e2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -104,6 +104,7 @@ import org.apache.asterix.external.api.IInputStreamFactory;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStreamFactory.IncludeExcludeMatcher;
 import org.apache.asterix.external.library.JavaLibrary;
+import org.apache.asterix.external.util.ExternalDataConstants.ParquetOptions;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.AUnionType;
@@ -823,15 +824,21 @@ public class ExternalDataUtils {
     }
 
     /**
-     * Validate the dataset type declared with a given type
+     * Validate Parquet dataset's declared type and configuration
      *
      * @param properties        external dataset configuration
      * @param datasetRecordType dataset declared type
      */
-    public static void validateType(Map<String, String> properties, ARecordType datasetRecordType)
-            throws CompilationException {
-        if (isParquetFormat(properties) && datasetRecordType.getFieldTypes().length != 0) {
-            throw new CompilationException(ErrorCode.UNSUPPORTED_TYPE_FOR_PARQUET, datasetRecordType.getTypeName());
+    public static void validateParquetTypeAndConfiguration(Map<String, String> properties,
+            ARecordType datasetRecordType) throws CompilationException {
+        if (isParquetFormat(properties)) {
+            if (datasetRecordType.getFieldTypes().length != 0) {
+                throw new CompilationException(ErrorCode.UNSUPPORTED_TYPE_FOR_PARQUET, datasetRecordType.getTypeName());
+            } else if (properties.containsKey(ParquetOptions.TIMEZONE)
+                    && !ParquetOptions.VALID_TIME_ZONES.contains(properties.get(ParquetOptions.TIMEZONE))) {
+                //Ensure the configured time zone id is correct
+                throw new CompilationException(ErrorCode.INVALID_TIMEZONE, properties.get(ParquetOptions.TIMEZONE));
+            }
         }
     }
 
@@ -1770,7 +1777,8 @@ public class ExternalDataUtils {
          * Builds the Azure Blob storage client using the provided configuration
          *
          * @param configuration properties
-         * @see <a href="https://docs.microsoft.com/en-us/azure/databricks/data/data-sources/azure/azure-storage">Azure Blob storage</a>
+         * @see <a href="https://docs.microsoft.com/en-us/azure/databricks/data/data-sources/azure/azure-storage">Azure
+         * Blob storage</a>
          */
         public static void configureAzureHdfsJobConf(JobConf conf, Map<String, String> configuration, String endPoint) {
             String container = configuration.get(ExternalDataConstants.CONTAINER_NAME_FIELD_NAME);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
index 28a0766..9e49d86 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
@@ -40,6 +40,7 @@ import org.apache.asterix.external.indexing.RecordId.RecordIdType;
 import org.apache.asterix.external.input.record.reader.hdfs.parquet.MapredParquetInputFormat;
 import org.apache.asterix.external.input.record.reader.hdfs.parquet.ParquetReadSupport;
 import org.apache.asterix.external.input.stream.HDFSInputStream;
+import org.apache.asterix.external.util.ExternalDataConstants.ParquetOptions;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.runtime.projection.DataProjectionInfo;
 import org.apache.asterix.runtime.projection.FunctionCallInformation;
@@ -224,24 +225,40 @@ public class HDFSUtils {
         }
 
         if (ExternalDataConstants.CLASS_NAME_PARQUET_INPUT_FORMAT.equals(formatClassName)) {
-            //Parquet configurations
-            conf.set(ParquetInputFormat.READ_SUPPORT_CLASS, ParquetReadSupport.class.getName());
-            //Get requested values
-            String requestedValues = configuration.get(ExternalDataConstants.KEY_REQUESTED_FIELDS);
-            if (requestedValues == null) {
-                //No value is requested, return the entire record
-                requestedValues = DataProjectionInfo.ALL_FIELDS_TYPE.getTypeName();
-            } else {
-                //Subset of the values were requested, set the functionCallInformation
-                conf.set(ExternalDataConstants.KEY_HADOOP_ASTERIX_FUNCTION_CALL_INFORMATION,
-                        configuration.get(ExternalDataConstants.KEY_HADOOP_ASTERIX_FUNCTION_CALL_INFORMATION));
-            }
-            conf.set(ExternalDataConstants.KEY_REQUESTED_FIELDS, requestedValues);
+            configureParquet(configuration, conf);
         }
 
         return conf;
     }
 
+    private static void configureParquet(Map<String, String> configuration, JobConf conf) {
+        //Parquet configurations
+        conf.set(ParquetInputFormat.READ_SUPPORT_CLASS, ParquetReadSupport.class.getName());
+
+        //Get requested values
+        String requestedValues = configuration.get(ExternalDataConstants.KEY_REQUESTED_FIELDS);
+        if (requestedValues == null) {
+            //No value is requested, return the entire record
+            requestedValues = DataProjectionInfo.ALL_FIELDS_TYPE.getTypeName();
+        } else {
+            //Subset of the values were requested, set the functionCallInformation
+            conf.set(ExternalDataConstants.KEY_HADOOP_ASTERIX_FUNCTION_CALL_INFORMATION,
+                    configuration.get(ExternalDataConstants.KEY_HADOOP_ASTERIX_FUNCTION_CALL_INFORMATION));
+        }
+        conf.set(ExternalDataConstants.KEY_REQUESTED_FIELDS, requestedValues);
+
+        //Parse JSON string as ADM?
+        conf.set(ParquetOptions.HADOOP_PARSE_JSON_STRING,
+                configuration.getOrDefault(ParquetOptions.PARSE_JSON_STRING, ExternalDataConstants.TRUE));
+
+        //Rebase and parse decimal as double?
+        conf.set(ParquetOptions.HADOOP_DECIMAL_TO_DOUBLE,
+                configuration.getOrDefault(ParquetOptions.DECIMAL_TO_DOUBLE, ExternalDataConstants.FALSE));
+        //Re-adjust the time zone for UTC-adjusted values
+        conf.set(ParquetOptions.HADOOP_TIMEZONE, configuration.getOrDefault(ParquetOptions.TIMEZONE, ""));
+
+    }
+
     public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(IApplicationContext appCtx,
             AlgebricksAbsolutePartitionConstraint clusterLocations) {
         if (clusterLocations == null) {

[asterixdb] 06/12: [ASTERIXDB-3007][COMP] Fix ConsolidateWindowOperatorsRule

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 11a30f10ab84c523cbea10b879bd1c2a245dda00
Author: Dmitry Lychagin <dm...@couchbase.com>
AuthorDate: Mon Jan 31 14:42:57 2022 -0800

     [ASTERIXDB-3007][COMP] Fix ConsolidateWindowOperatorsRule
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    - Fix ConsolidateWindowOperatorsRule to correctly merge window
      operator with subplans into window operator without subplans
    - Fix deep copy visitors for window operator with subplans
    - Add compiler sanity check code to verify that each nested tuple
      source operator correctly points to its datasource operator
    
    Change-Id: Ib9077a0331ab57cdd449426be77f05741d0778cc
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15063
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Dmitry Lychagin <dm...@couchbase.com>
    Reviewed-by: Ali Alsuliman <al...@gmail.com>
---
 .../queries/window/win_opt_02/win_opt_02_1.sqlpp   | 35 ++++++++++++++++++++
 .../results/window/win_opt_02/win_opt_02_1.plan    | 23 ++++++++++++++
 .../window/win_opt_02/win_opt_02.10.query.sqlpp    | 37 ++++++++++++++++++++++
 .../results/window/win_opt_02/win_opt_02.10.adm    | 10 ++++++
 ...calOperatorDeepCopyWithNewVariablesVisitor.java |  4 +--
 .../logical/visitors/OperatorDeepCopyVisitor.java  |  4 +--
 .../core/algebra/plan/PlanStructureVerifier.java   | 35 +++++++++++++++++++-
 .../rules/ConsolidateWindowOperatorsRule.java      | 20 ++++++++++--
 8 files changed, 161 insertions(+), 7 deletions(-)

diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries/window/win_opt_02/win_opt_02_1.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/window/win_opt_02/win_opt_02_1.sqlpp
new file mode 100644
index 0000000..def3a02
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/window/win_opt_02/win_opt_02_1.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Test fix for ASTERIXDB-3007
+ * Expected Res : SUCCESS
+ */
+
+with ds1 as (
+  select r as t, r*r as x
+  from range(1, 10) r
+)
+
+select t, x, dt, dx, int(v) as v, int(a) as a
+from ds1
+let dt = t - lag(t) over (order by t),
+    dx = x - lag(x) over (order by t),
+    v = dx/dt,
+    a = v - lag(v) over (order by t)
+order by t;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/window/win_opt_02/win_opt_02_1.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/window/win_opt_02/win_opt_02_1.plan
new file mode 100644
index 0000000..931e417
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/window/win_opt_02/win_opt_02_1.plan
@@ -0,0 +1,23 @@
+-- DISTRIBUTE_RESULT  |LOCAL|
+  -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+    -- STREAM_PROJECT  |LOCAL|
+      -- ASSIGN  |LOCAL|
+        -- STREAM_PROJECT  |LOCAL|
+          -- ASSIGN  |LOCAL|
+            -- STREAM_PROJECT  |LOCAL|
+              -- WINDOW  |LOCAL|
+                      {
+                        -- AGGREGATE  |LOCAL|
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                      }
+                -- WINDOW  |LOCAL|
+                        {
+                          -- AGGREGATE  |LOCAL|
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                        }
+                  -- WINDOW_STREAM  |LOCAL|
+                    -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                      -- STABLE_SORT [$$r(ASC)]  |LOCAL|
+                        -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+                          -- UNNEST  |UNPARTITIONED|
+                            -- EMPTY_TUPLE_SOURCE  |UNPARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/window/win_opt_02/win_opt_02.10.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/window/win_opt_02/win_opt_02.10.query.sqlpp
new file mode 100644
index 0000000..a6f448c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/window/win_opt_02/win_opt_02.10.query.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Test fix for ASTERIXDB-3007
+ * Expected Res : SUCCESS
+ */
+
+use test;
+
+with ds1 as (
+  select r as t, r*r as x
+  from range(1, 10) r
+)
+
+select t, x, dt, dx, int(v) as v, int(a) as a
+from ds1
+let dt = t - lag(t) over (order by t),
+    dx = x - lag(x) over (order by t),
+    v = dx/dt,
+    a = v - lag(v) over (order by t)
+order by t;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/window/win_opt_02/win_opt_02.10.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/window/win_opt_02/win_opt_02.10.adm
new file mode 100644
index 0000000..29322df
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/window/win_opt_02/win_opt_02.10.adm
@@ -0,0 +1,10 @@
+{ "t": 1, "x": 1, "dt": null, "dx": null, "v": null, "a": null }
+{ "t": 2, "x": 4, "dt": 1, "dx": 3, "v": 3, "a": null }
+{ "t": 3, "x": 9, "dt": 1, "dx": 5, "v": 5, "a": 2 }
+{ "t": 4, "x": 16, "dt": 1, "dx": 7, "v": 7, "a": 2 }
+{ "t": 5, "x": 25, "dt": 1, "dx": 9, "v": 9, "a": 2 }
+{ "t": 6, "x": 36, "dt": 1, "dx": 11, "v": 11, "a": 2 }
+{ "t": 7, "x": 49, "dt": 1, "dx": 13, "v": 13, "a": 2 }
+{ "t": 8, "x": 64, "dt": 1, "dx": 15, "v": 15, "a": 2 }
+{ "t": 9, "x": 81, "dt": 1, "dx": 17, "v": 17, "a": 2 }
+{ "t": 10, "x": 100, "dt": 1, "dx": 19, "v": 19, "a": 2 }
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
index 199c2e1..e242531 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
@@ -619,11 +619,11 @@ public class LogicalOperatorDeepCopyWithNewVariablesVisitor
         List<LogicalVariable> varCopy = deepCopyVariableList(op.getVariables());
         List<Mutable<ILogicalExpression>> exprCopy =
                 exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions());
-        List<ILogicalPlan> nestedPlansCopy = new ArrayList<>();
         WindowOperator opCopy = new WindowOperator(partitionExprCopy, orderExprCopy, frameValueExprCopy,
                 frameStartExprCopy, frameStartValidationExprCopy, frameEndExprCopy, frameEndValidationExprCopy,
                 frameExcludeExprCopy, op.getFrameExcludeNegationStartIdx(), frameExcludeUnaryExprCopy,
-                frameOffsetExprCopy, op.getFrameMaxObjects(), varCopy, exprCopy, nestedPlansCopy);
+                frameOffsetExprCopy, op.getFrameMaxObjects(), varCopy, exprCopy, null);
+        List<ILogicalPlan> nestedPlansCopy = opCopy.getNestedPlans();
         deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
         deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
         return opCopy;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
index 7b67af1..c2ee661 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
@@ -448,11 +448,11 @@ public class OperatorDeepCopyVisitor implements ILogicalOperatorVisitor<ILogical
         deepCopyVars(newVariables, op.getVariables());
         List<Mutable<ILogicalExpression>> newExpressions = new ArrayList<>();
         deepCopyExpressionRefs(newExpressions, op.getExpressions());
-        List<ILogicalPlan> newNestedPlans = new ArrayList<>();
         WindowOperator newWinOp = new WindowOperator(newPartitionExprs, newOrderExprs, newFrameValueExprs,
                 newFrameStartExprs, newFrameStartValidationExprs, newFrameEndExprs, newFrameEndValidationExprs,
                 newFrameExclusionExprs, op.getFrameExcludeNegationStartIdx(), newFrameExcludeUnaryExpr,
-                newFrameOffsetExpr, op.getFrameMaxObjects(), newVariables, newExpressions, newNestedPlans);
+                newFrameOffsetExpr, op.getFrameMaxObjects(), newVariables, newExpressions, null);
+        List<ILogicalPlan> newNestedPlans = newWinOp.getNestedPlans();
         for (ILogicalPlan nestedPlan : op.getNestedPlans()) {
             newNestedPlans.add(OperatorManipulationUtil.deepCopy(nestedPlan, newWinOp));
         }
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanStructureVerifier.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanStructureVerifier.java
index a072d11..2b40114 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanStructureVerifier.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanStructureVerifier.java
@@ -46,9 +46,11 @@ import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
 import org.apache.hyracks.algebricks.core.algebra.prettyprint.IPlanPrettyPrinter;
 import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
+import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
 import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
 import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
 
@@ -76,6 +78,11 @@ public final class PlanStructureVerifier {
 
     private static final String ERROR_MESSAGE_TEMPLATE_6 = "undefined used variables %s in %s";
 
+    private static final String ERROR_MESSAGE_TEMPLATE_7 =
+            "unexpected source operator in NestedTupleSourceOperator: %s. Expected source operator %s";
+
+    private static final String ERROR_MESSAGE_TEMPLATE_8 = "unexpected leaf operator in nested plan: %s";
+
     public static final Comparator<LogicalVariable> VARIABLE_CMP = Comparator.comparing(LogicalVariable::toString);
 
     private final ExpressionReferenceVerifierVisitor exprVisitor = new ExpressionReferenceVerifierVisitor();
@@ -185,7 +192,10 @@ public final class PlanStructureVerifier {
         if (op instanceof AbstractOperatorWithNestedPlans) {
             children = new ArrayList<>(children);
             for (ILogicalPlan nestedPlan : ((AbstractOperatorWithNestedPlans) op).getNestedPlans()) {
-                children.addAll(nestedPlan.getRoots());
+                for (Mutable<ILogicalOperator> nestedRootRef : nestedPlan.getRoots()) {
+                    checkLeafOperatorsInNestedPlan(op, nestedRootRef);
+                    children.add(nestedRootRef);
+                }
             }
         }
         return children;
@@ -262,6 +272,29 @@ public final class PlanStructureVerifier {
         }
     }
 
+    private void checkLeafOperatorsInNestedPlan(ILogicalOperator op, Mutable<ILogicalOperator> rootRef)
+            throws AlgebricksException {
+        for (Mutable<ILogicalOperator> leafRef : OperatorManipulationUtil.findLeafDescendantsOrSelf(rootRef)) {
+            ILogicalOperator leafOp = leafRef.getValue();
+            switch (leafOp.getOperatorTag()) {
+                case EMPTYTUPLESOURCE:
+                    break;
+                case NESTEDTUPLESOURCE:
+                    NestedTupleSourceOperator ntsOp = (NestedTupleSourceOperator) leafOp;
+                    ILogicalOperator ntsSrcOp = ntsOp.getDataSourceReference().getValue();
+                    if (ntsSrcOp != op) {
+                        throw new AlgebricksException(String.format(ERROR_MESSAGE_TEMPLATE_7,
+                                PlanStabilityVerifier.printOperator(ntsSrcOp, prettyPrinter),
+                                PlanStabilityVerifier.printOperator(op, prettyPrinter)));
+                    }
+                    break;
+                default:
+                    throw new AlgebricksException(String.format(ERROR_MESSAGE_TEMPLATE_8,
+                            PlanStabilityVerifier.printOperator(leafOp, prettyPrinter)));
+            }
+        }
+    }
+
     private void raiseException(String sharedReferenceKind, String sharedEntity, ILogicalOperator firstOp,
             ILogicalOperator secondOp) throws AlgebricksException {
         String errorMessage;
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ConsolidateWindowOperatorsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ConsolidateWindowOperatorsRule.java
index 2ec0654..317dabb 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ConsolidateWindowOperatorsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ConsolidateWindowOperatorsRule.java
@@ -33,10 +33,12 @@ import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.WindowOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.IsomorphismOperatorVisitor;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.IsomorphismUtilities;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
 import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
 import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
 
@@ -82,7 +84,9 @@ public class ConsolidateWindowOperatorsRule implements IAlgebraicRewriteRule {
 
         Set<LogicalVariable> used1 = new HashSet<>();
         VariableUtilities.getUsedVariables(winOp1, used1);
-        if (!OperatorPropertiesUtil.disjoint(winOp2.getVariables(), used1)) {
+        Set<LogicalVariable> produced2 = new HashSet<>();
+        VariableUtilities.getProducedVariables(winOp2, produced2);
+        if (!OperatorPropertiesUtil.disjoint(produced2, used1)) {
             return false;
         }
 
@@ -130,7 +134,6 @@ public class ConsolidateWindowOperatorsRule implements IAlgebraicRewriteRule {
             aggTo.getExpressions().addAll(aggFrom.getExpressions());
             context.computeAndSetTypeEnvironmentForOperator(aggTo);
         } else {
-            setAll(winOpTo.getNestedPlans(), winOpFrom.getNestedPlans());
             setAll(winOpTo.getFrameValueExpressions(), winOpFrom.getFrameValueExpressions());
             setAll(winOpTo.getFrameStartExpressions(), winOpFrom.getFrameStartExpressions());
             setAll(winOpTo.getFrameStartValidationExpressions(), winOpFrom.getFrameStartValidationExpressions());
@@ -141,6 +144,19 @@ public class ConsolidateWindowOperatorsRule implements IAlgebraicRewriteRule {
             winOpTo.getFrameExcludeUnaryExpression().setValue(winOpFrom.getFrameExcludeUnaryExpression().getValue());
             winOpTo.getFrameOffsetExpression().setValue(winOpFrom.getFrameOffsetExpression().getValue());
             winOpTo.setFrameMaxObjects(winOpFrom.getFrameMaxObjects());
+            // move nested plans
+            for (ILogicalPlan fromNestedPlan : winOpFrom.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> rootRef : fromNestedPlan.getRoots()) {
+                    for (Mutable<ILogicalOperator> leafRef : OperatorManipulationUtil
+                            .findLeafDescendantsOrSelf(rootRef)) {
+                        ILogicalOperator leafOp = leafRef.getValue();
+                        if (leafOp.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
+                            ((NestedTupleSourceOperator) leafOp).getDataSourceReference().setValue(winOpTo);
+                        }
+                    }
+                }
+                winOpTo.getNestedPlans().add(fromNestedPlan);
+            }
         }
         return true;
     }

[asterixdb] 03/12: [NO ISSUE][CONF] Removed unused active partitions config

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 5f52a5f7059691be915d781e9370d5295273dded
Author: Murtadha Hubail <mu...@couchbase.com>
AuthorDate: Thu Jan 27 23:17:03 2022 +0300

    [NO ISSUE][CONF] Removed unused active partitions config
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    
    - Remove unused active partitions config.
    - Improved node partitions logging.
    - Logging fixes.
    
    Change-Id: I4f2a611e2846405738401310f485db0e72844031
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15024
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Ali Alsuliman <al...@gmail.com>
---
 .../apache/asterix/app/nc/NCAppRuntimeContext.java   |  2 +-
 .../org/apache/asterix/app/nc/ReplicaManager.java    | 11 +++--------
 .../asterix/app/nc/task/UpdateNodeStatusTask.java    |  4 ++--
 .../app/replication/NcLifecycleCoordinator.java      |  5 +++--
 .../message/NCLifecycleTaskReportMessage.java        |  6 ++++++
 .../message/RegistrationTasksRequestMessage.java     | 20 +++++++++++++-------
 .../message/RegistrationTasksResponseMessage.java    |  8 ++------
 .../asterix/hyracks/bootstrap/NCApplication.java     |  4 ++--
 .../asterix/common/config/MetadataProperties.java    |  4 ++--
 .../apache/asterix/common/config/NodeProperties.java |  3 +--
 .../asterix/common/config/PropertiesAccessor.java    | 18 +++++++-----------
 .../apache/asterix/common/utils/NcLocalCounters.java |  6 ++++++
 12 files changed, 48 insertions(+), 43 deletions(-)

diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
index f532352..1a89168 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
@@ -224,7 +224,7 @@ public class NCAppRuntimeContext implements INcApplicationContext {
                 new DatasetLifecycleManager(storageProperties, localResourceRepository, txnSubsystem.getLogManager(),
                         virtualBufferCache, indexCheckpointManagerProvider, ioManager.getIODevices().size());
         final String nodeId = getServiceContext().getNodeId();
-        final Set<Integer> nodePartitions = metadataProperties.getNodeActivePartitions(nodeId);
+        final Set<Integer> nodePartitions = metadataProperties.getNodePartitions(nodeId);
         replicaManager = new ReplicaManager(this, nodePartitions);
         isShuttingdown = false;
         activeManager = new ActiveManager(threadExecutor, getServiceContext().getNodeId(),
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/ReplicaManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/ReplicaManager.java
index 7c4b59c..7b52df6 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/ReplicaManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/ReplicaManager.java
@@ -19,7 +19,6 @@
 package org.apache.asterix.app.nc;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -31,7 +30,6 @@ import java.util.stream.Stream;
 
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.api.INcApplicationContext;
-import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.replication.IPartitionReplica;
 import org.apache.asterix.common.storage.IReplicaManager;
 import org.apache.asterix.common.storage.ReplicaIdentifier;
@@ -188,11 +186,8 @@ public class ReplicaManager implements IReplicaManager {
     }
 
     private void setNodeOwnedPartitions(INcApplicationContext appCtx) {
-        ClusterPartition[] clusterPartitions =
-                appCtx.getMetadataProperties().getNodePartitions().get(appCtx.getServiceContext().getNodeId());
-        if (clusterPartitions != null) {
-            nodeOwnedPartitions.addAll(Arrays.stream(clusterPartitions).map(ClusterPartition::getPartitionId)
-                    .collect(Collectors.toList()));
-        }
+        Set<Integer> nodePartitions =
+                appCtx.getMetadataProperties().getNodePartitions(appCtx.getServiceContext().getNodeId());
+        nodeOwnedPartitions.addAll(nodePartitions);
     }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/UpdateNodeStatusTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/UpdateNodeStatusTask.java
index fe579ad..17eff4a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/UpdateNodeStatusTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/UpdateNodeStatusTask.java
@@ -36,7 +36,7 @@ public class UpdateNodeStatusTask implements INCLifecycleTask {
     private static final Logger LOGGER = LogManager.getLogger();
     private static final long serialVersionUID = 2L;
     private final NodeStatus status;
-    private Set<Integer> activePartitions;
+    private final Set<Integer> activePartitions;
 
     public UpdateNodeStatusTask(NodeStatus status, Set<Integer> activePartitions) {
         this.status = status;
@@ -61,6 +61,6 @@ public class UpdateNodeStatusTask implements INCLifecycleTask {
 
     @Override
     public String toString() {
-        return "{ \"class\" : \"" + getClass().getSimpleName() + "\" }";
+        return "UpdateNodeStatusTask{" + "status=" + status + ", activePartitions=" + activePartitions + '}';
     }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java
index 22a0a84..06005a9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java
@@ -168,8 +168,9 @@ public class NcLifecycleCoordinator implements INcLifecycleCoordinator {
 
     protected List<INCLifecycleTask> buildNCRegTasks(String nodeId, NodeStatus nodeStatus, SystemState state,
             Set<Integer> activePartitions) {
-        LOGGER.info("Building registration tasks for node {} with status {} and system state: {}", nodeId, nodeStatus,
-                state);
+        LOGGER.info(
+                "Building registration tasks for node {} with status {} and system state: {} and active partitions {}",
+                nodeId, nodeStatus, state, activePartitions);
         final boolean isMetadataNode = nodeId.equals(metadataNodeId);
         switch (nodeStatus) {
             case ACTIVE:
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
index 1309369..b2a2dd7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
@@ -76,4 +76,10 @@ public class NCLifecycleTaskReportMessage implements INCLifecycleMessage, ICcAdd
     public Set<Integer> getActivePartitions() {
         return activePartitions;
     }
+
+    @Override
+    public String toString() {
+        return "NCLifecycleTaskReportMessage{" + "nodeId='" + nodeId + '\'' + ", success=" + success + ", exception="
+                + exception + ", localCounters=" + localCounters + ", activePartitions=" + activePartitions + '}';
+    }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
index fb50b3e..9e95ac6 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.app.replication.message;
 
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 
@@ -39,18 +38,18 @@ public class RegistrationTasksRequestMessage implements INCLifecycleMessage, ICc
 
     private static final Logger LOGGER = LogManager.getLogger();
     private static final long serialVersionUID = 2L;
-    protected final SystemState state;
-    protected final String nodeId;
-    protected final NodeStatus nodeStatus;
-    protected final Map<String, Object> secrets;
-    protected final Set<Integer> activePartitions;
+    private final SystemState state;
+    private final String nodeId;
+    private final NodeStatus nodeStatus;
+    private final Map<String, Object> secrets;
+    private final Set<Integer> activePartitions;
 
     public RegistrationTasksRequestMessage(String nodeId, NodeStatus nodeStatus, SystemState state,
             Map<String, Object> secretsEphemeral, Set<Integer> activePartitions) {
         this.state = state;
         this.nodeId = nodeId;
         this.nodeStatus = nodeStatus;
-        this.secrets = new HashMap<>(secretsEphemeral);
+        this.secrets = secretsEphemeral;
         this.activePartitions = activePartitions;
     }
 
@@ -59,6 +58,7 @@ public class RegistrationTasksRequestMessage implements INCLifecycleMessage, ICc
         try {
             RegistrationTasksRequestMessage msg = new RegistrationTasksRequestMessage(cs.getId(), nodeStatus,
                     systemState, secretsEphemeral, activePartitions);
+            LOGGER.info("sending {} to CC", msg);
             ((INCMessageBroker) cs.getContext().getMessageBroker()).sendMessageToCC(ccId, msg);
         } catch (Exception e) {
             LOGGER.log(Level.ERROR, "Unable to send RegistrationTasksRequestMessage to CC", e);
@@ -95,4 +95,10 @@ public class RegistrationTasksRequestMessage implements INCLifecycleMessage, ICc
     public Set<Integer> getActivePartitions() {
         return activePartitions;
     }
+
+    @Override
+    public String toString() {
+        return "RegistrationTasksRequestMessage{" + "state=" + state + ", nodeId='" + nodeId + '\'' + ", nodeStatus="
+                + nodeStatus + ", activePartitions=" + activePartitions + '}';
+    }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java
index f0a4a7c..f0f0470 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java
@@ -59,13 +59,9 @@ public class RegistrationTasksResponseMessage extends CcIdentifiedMessage
             Throwable exception = null;
             try {
                 for (INCLifecycleTask task : tasks) {
-                    if (LOGGER.isInfoEnabled()) {
-                        LOGGER.log(Level.INFO, "Starting startup task: " + task);
-                    }
+                    LOGGER.log(Level.INFO, "Starting startup task: {}", task);
                     task.perform(getCcId(), cs);
-                    if (LOGGER.isInfoEnabled()) {
-                        LOGGER.log(Level.INFO, "Completed startup task: " + task);
-                    }
+                    LOGGER.log(Level.INFO, "Completed startup task: {}", task);
                 }
             } catch (Throwable e) { //NOSONAR all startup failures should be reported to CC
                 LOGGER.log(Level.ERROR, "Failed during startup task", e);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
index be1cc7c..f60349f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
@@ -300,12 +300,12 @@ public class NCApplication extends BaseNCApplication {
         final NodeStatus currentStatus = ncs.getNodeStatus();
         final SystemState systemState = isPendingStartupTasks(currentStatus, ncs.getPrimaryCcId(), ccId)
                 ? getCurrentSystemState() : SystemState.HEALTHY;
-        final Map httpSecrets =
+        final Map<String, Object> httpSecrets =
                 apiServer != null ? Collections.singletonMap(SYS_AUTH_HEADER, apiServer.ctx().get(SYS_AUTH_HEADER))
                         : Collections.emptyMap();
         RegistrationTasksRequestMessage.send(ccId, (NodeControllerService) ncServiceCtx.getControllerService(),
                 currentStatus, systemState, httpSecrets,
-                runtimeContext.getMetadataProperties().getNodeActivePartitions(nodeId));
+                runtimeContext.getMetadataProperties().getNodePartitions(nodeId));
     }
 
     @Override
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java
index 31708d3..252017f 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java
@@ -124,8 +124,8 @@ public class MetadataProperties extends AbstractProperties {
         return accessor.getClusterPartitions();
     }
 
-    public Set<Integer> getNodeActivePartitions(String nodeId) {
-        return accessor.getActivePartitions(nodeId);
+    public Set<Integer> getNodePartitions(String nodeId) {
+        return accessor.getNodePartitions(nodeId);
     }
 
     public Map<String, String> getTransactionLogDirs() {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java
index afb103d..522cabd 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java
@@ -46,8 +46,7 @@ public class NodeProperties extends AbstractProperties {
         STARTING_PARTITION_ID(
                 OptionTypes.INTEGER,
                 -1,
-                "The first partition id to assign to iodevices on this node (-1 == auto-assign)"),
-        ACTIVE_PARTITIONS(OptionTypes.STRING_ARRAY, null, "List of node active partitions");
+                "The first partition id to assign to iodevices on this node (-1 == auto-assign)");
 
         private final IOptionType type;
         private final Object defaultValue;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
index 5ba378d..80f9a17 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
@@ -41,6 +41,7 @@ import java.util.stream.Collectors;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.utils.PrintUtil;
 import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.config.IApplicationConfig;
@@ -69,7 +70,7 @@ public class PropertiesAccessor implements IApplicationConfig {
     /**
      * Constructor which wraps an IApplicationConfig.
      */
-    private PropertiesAccessor(IApplicationConfig cfg) throws AsterixException, IOException {
+    private PropertiesAccessor(IApplicationConfig cfg) throws AsterixException {
         this.cfg = cfg;
         nodePartitionsMap = new ConcurrentHashMap<>();
         clusterPartitions = Collections.synchronizedSortedMap(new TreeMap<>());
@@ -80,6 +81,7 @@ public class PropertiesAccessor implements IApplicationConfig {
         for (String ncName : cfg.getNCNames()) {
             configureNc(configManager, ncName, uniquePartitionId);
         }
+        LOGGER.info("configured partitions: {} from config {}", () -> PrintUtil.toString(nodePartitionsMap), () -> cfg);
         for (String section : cfg.getSectionNames()) {
             if (section.startsWith(AsterixProperties.SECTION_PREFIX_EXTENSION)) {
                 String className = AsterixProperties.getSectionId(AsterixProperties.SECTION_PREFIX_EXTENSION, section);
@@ -194,22 +196,16 @@ public class PropertiesAccessor implements IApplicationConfig {
         return clusterPartitions;
     }
 
-    public Set<Integer> getActivePartitions(String nodeId) {
-        // by default, node actives partitions are the partitions assigned to the node
-        String[] activePartitions = cfg.getStringArray(NodeProperties.Option.ACTIVE_PARTITIONS);
-        if (activePartitions == null) {
-            ClusterPartition[] nodeClusterPartitions = nodePartitionsMap.get(nodeId);
-            return Arrays.stream(nodeClusterPartitions).map(ClusterPartition::getPartitionId)
-                    .collect(Collectors.toSet());
-        }
-        return Arrays.stream(activePartitions).map(Integer::parseInt).collect(Collectors.toSet());
+    public Set<Integer> getNodePartitions(String nodeId) {
+        ClusterPartition[] nodeClusterPartitions = nodePartitionsMap.get(nodeId);
+        return Arrays.stream(nodeClusterPartitions).map(ClusterPartition::getPartitionId).collect(Collectors.toSet());
     }
 
     public List<AsterixExtension> getExtensions() {
         return extensions;
     }
 
-    public static PropertiesAccessor getInstance(IApplicationConfig cfg) throws IOException, AsterixException {
+    public static PropertiesAccessor getInstance(IApplicationConfig cfg) throws AsterixException {
         PropertiesAccessor accessor = instances.get(cfg);
         if (accessor == null) {
             accessor = new PropertiesAccessor(cfg);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/NcLocalCounters.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/NcLocalCounters.java
index 5cf6724..8956b93 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/NcLocalCounters.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/NcLocalCounters.java
@@ -59,4 +59,10 @@ public class NcLocalCounters implements Serializable {
     public long getMaxJobId() {
         return maxJobId;
     }
+
+    @Override
+    public String toString() {
+        return "NcLocalCounters{" + "maxResourceId=" + maxResourceId + ", maxTxnId=" + maxTxnId + ", maxJobId="
+                + maxJobId + '}';
+    }
 }

[asterixdb] 02/12: [NO ISSUE][STO] Skip flush on datasets with no open indexes

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 9abc3a87479a4a62a21b6c316d3f9144ab181c2a
Author: Murtadha Hubail <mu...@couchbase.com>
AuthorDate: Fri Jan 28 01:17:16 2022 +0300

    [NO ISSUE][STO] Skip flush on datasets with no open indexes
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    
    - When attempting to flush a dataset, skip if the dataset has
      no open indexes.
    - Halt when no primary index is open while other indexes are
      open to clear the inconsistent memory state.
    
    Change-Id: Ib30f8bbbad03f5563ce27d573553f562d0ae484d
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15025
    Contrib: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Ali Alsuliman <al...@gmail.com>
    Tested-by: Murtadha Hubail <mh...@apache.org>
---
 .../asterix/common/context/PrimaryIndexOperationTracker.java | 12 ++++++++++--
 .../src/main/java/org/apache/hyracks/util/ExitUtil.java      |  1 +
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
index fb001a0..b0d8e02 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
@@ -51,6 +51,7 @@ import org.apache.hyracks.storage.am.lsm.common.impls.FlushOperation;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentId;
 import org.apache.hyracks.storage.common.IModificationOperationCallback;
 import org.apache.hyracks.storage.common.ISearchOperationCallback;
+import org.apache.hyracks.util.ExitUtil;
 import org.apache.hyracks.util.annotations.NotThreadSafe;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -132,6 +133,11 @@ public class PrimaryIndexOperationTracker extends BaseOperationTracker implement
                     throw new IllegalStateException(
                             "Can't request a flush on an index with active operations: " + numActiveOperations.get());
                 }
+                if (indexes.isEmpty()) {
+                    LOGGER.debug("no open indexes on dataset {} and partition {}... skipping flush",
+                            dsInfo.getDatasetID(), partition);
+                    return;
+                }
                 for (ILSMIndex lsmIndex : indexes) {
                     if (lsmIndex.isPrimaryIndex()) {
                         if (lsmIndex.isCurrentMutableComponentEmpty()) {
@@ -145,8 +151,10 @@ public class PrimaryIndexOperationTracker extends BaseOperationTracker implement
                 }
             }
             if (primaryLsmIndex == null) {
-                throw new IllegalStateException("Primary index not found in dataset " + dsInfo.getDatasetID()
-                        + " and partition " + partition + " open indexes " + indexes);
+                LOGGER.fatal(
+                        "Primary index not found in dataset {} and partition {} open indexes {}; halting to clear memory state",
+                        dsInfo.getDatasetID(), partition, indexes);
+                ExitUtil.halt(ExitUtil.EC_INCONSISTENT_STORAGE_REFERENCES);
             }
             for (ILSMIndex lsmIndex : indexes) {
                 ILSMOperationTracker opTracker = lsmIndex.getOperationTracker();
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
index f4c4183..beabb5d 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
@@ -56,6 +56,7 @@ public class ExitUtil {
     public static final int EC_ACTIVE_RESUME_FAILURE = 18;
     public static final int EC_NC_FAILED_TO_NOTIFY_TASKS_COMPLETED = 19;
     public static final int EC_FAILED_TO_CANCEL_ACTIVE_START_STOP = 22;
+    public static final int EC_INCONSISTENT_STORAGE_REFERENCES = 23;
     public static final int EC_IMMEDIATE_HALT = 33;
     public static final int EC_HALT_ABNORMAL_RESERVED_44 = 44;
     public static final int EC_IO_SCHEDULER_FAILED = 55;

[asterixdb] 12/12: Merge branch 'gerrit/neo'

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 412de587a1a6a54477e50ecd4d1c5690b51aa7ab
Merge: 4ba9293 e8c2042
Author: Michael Blow <mb...@apache.org>
AuthorDate: Wed Feb 2 12:56:19 2022 -0500

    Merge branch 'gerrit/neo'
    
    Change-Id: Ib75414ead94a7da9d8b5216c7b28d8ee198f31b5

 .../apache/asterix/app/nc/NCAppRuntimeContext.java |   2 +-
 .../org/apache/asterix/app/nc/ReplicaManager.java  |  11 +-
 .../asterix/app/nc/task/UpdateNodeStatusTask.java  |   4 +-
 .../app/replication/NcLifecycleCoordinator.java    |   5 +-
 .../message/NCLifecycleTaskReportMessage.java      |   6 +
 .../message/RegistrationTasksRequestMessage.java   |  20 +-
 .../message/RegistrationTasksResponseMessage.java  |   8 +-
 .../asterix/app/translator/QueryTranslator.java    |   9 +-
 .../asterix/hyracks/bootstrap/NCApplication.java   |   4 +-
 .../external_dataset/ExternalDatasetTestUtils.java |  20 +-
 .../aws/AwsS3ExternalDatasetTest.java              |   2 +-
 .../AzureBlobStorageExternalDatasetTest.java       |   2 +-
 .../{ => parquet}/BinaryFileConverterUtil.java     |  10 +-
 .../parquet/ParquetFileExampleGeneratorUtil.java   | 210 ++++++++++++
 .../apache/asterix/test/runtime/HDFSCluster.java   |   6 +-
 .../queries/window/win_opt_02/win_opt_02_1.sqlpp}  |  29 +-
 .../results/window/win_opt_02/win_opt_02_1.plan    |  23 ++
 .../parquet-types/decimal/decimal.1.ddl.sqlpp}     |  38 ++-
 .../parquet-types/decimal/decimal.2.query.sqlpp}   |  25 +-
 .../parquet-types/decimal/decimal.3.query.sqlpp}   |  22 +-
 .../disable-json-parsing.1.ddl.sqlpp}              |  29 +-
 .../disable-json-parsing.2.query.sqlpp}            |  22 +-
 .../invalid-timezone/temporal.1.ddl.sqlpp}         |  29 +-
 .../parquet-types/temporal/temporal.1.ddl.sqlpp}   |  29 +-
 .../parquet-types/temporal/tempral.2.query.sqlpp}  |  35 +-
 .../unset-flags/unset-flags.1.ddl.sqlpp}           |  28 +-
 .../unset-flags/unset-flags.2.query.sqlpp          |  60 ++++
 .../window/win_opt_02/win_opt_02.10.query.sqlpp}   |  29 +-
 .../parquet/parquet-types/decimal/decimal.02.adm   |   1 +
 .../disable-json-parsing.02.adm                    |   1 +
 .../parquet/parquet-types/temporal/temporal.02.adm |   1 +
 .../parquet-types/unset-flags/unset-flags.02.adm   |   1 +
 .../results/window/win_opt_02/win_opt_02.10.adm    |  10 +
 .../runtimets/testsuite_external_dataset_s3.xml    |  40 +++
 .../asterix/common/config/MetadataProperties.java  |   4 +-
 .../asterix/common/config/NodeProperties.java      |   3 +-
 .../asterix/common/config/PropertiesAccessor.java  |  18 +-
 .../context/PrimaryIndexOperationTracker.java      |  12 +-
 .../asterix/common/exceptions/ErrorCode.java       |   6 +
 .../asterix/common/utils/NcLocalCounters.java      |   6 +
 .../src/main/resources/asx_errormsg/en.properties  |   6 +
 .../asterix-doc/src/site/markdown/sqlpp/parquet.md | 363 +++++++++++++++++++++
 ...ue.java => AsterixParquetRuntimeException.java} |  22 +-
 .../parquet/AsterixTypeToParquetTypeVisitor.java   | 180 ++++++++--
 .../reader/hdfs/parquet/AtomicConverter.java       | 121 -------
 .../hdfs/parquet/ParquetFileRecordReader.java      |   6 +-
 .../reader/hdfs/parquet/ParquetReadSupport.java    |  37 ++-
 .../hdfs/parquet/{ => converter}/IFieldValue.java  |   4 +-
 .../parquet/converter/ParquetConverterContext.java | 274 ++++++++++++++++
 .../nested}/AbstractComplexConverter.java          |  45 ++-
 .../{ => converter/nested}/ArrayConverter.java     |  20 +-
 .../{ => converter/nested}/ObjectConverter.java    |  21 +-
 .../{ => converter/nested}/RepeatedConverter.java  |  20 +-
 .../{ => converter/nested}/RootConverter.java      |  15 +-
 .../primitve/BinaryConverter.java}                 |  41 +--
 .../primitve/DateConverter.java}                   |  24 +-
 .../converter/primitve/DecimalConverter.java       |  83 +++++
 .../primitve/GenericPrimitiveConverter.java        |  86 +++++
 .../converter/primitve/JsonStringConverter.java    |  77 +++++
 .../{ => converter/primitve}/MissingConverter.java |   2 +-
 .../primitve/PrimitiveConverterProvider.java       | 118 +++++++
 .../parquet/converter/primitve/TimeConverter.java  |  65 ++++
 .../converter/primitve/TimestampConverter.java     |  75 +++++
 .../primitve/UUIDConverter.java}                   |  43 +--
 .../primitve/UnsignedIntegerConverter.java         |  55 ++++
 .../external/parser/jackson/ParserContext.java     |  23 +-
 .../external/util/ExternalDataConstants.java       |  38 +++
 .../asterix/external/util/ExternalDataUtils.java   |  20 +-
 .../apache/asterix/external/util/HDFSUtils.java    |  43 ++-
 ...calOperatorDeepCopyWithNewVariablesVisitor.java |   4 +-
 .../logical/visitors/OperatorDeepCopyVisitor.java  |   4 +-
 .../core/algebra/plan/PlanStructureVerifier.java   |  35 +-
 .../core/algebra/plan/PlanVerifierTestBase.java    |   4 +-
 .../rules/ConsolidateWindowOperatorsRule.java      |  20 +-
 .../java/org/apache/hyracks/util/ExitUtil.java     |   1 +
 75 files changed, 2255 insertions(+), 560 deletions(-)

[asterixdb] 10/12: (Null) merge branch 'gerrit/stabilization-5e11053887'

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit f8403e35616ce3e0e02d4afb70f1e1a2baff1834
Merge: 11a30f1 9679811
Author: Michael Blow <mb...@apache.org>
AuthorDate: Wed Feb 2 08:05:35 2022 -0500

    (Null) merge branch 'gerrit/stabilization-5e11053887'
    
    Change-Id: Ib6f57109b6c45ff9ee1eda630df3e462570121fe


[asterixdb] 07/12: [NO ISSUE][OTH] Replace usage of FileUtils.listFiles

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 594eb450f359928bb8cd04859be0d208104fee4c
Author: Murtadha Hubail <mu...@couchbase.com>
AuthorDate: Wed Nov 3 13:04:16 2021 +0300

    [NO ISSUE][OTH] Replace usage of FileUtils.listFiles
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    
    - Avoid depending on FileUtils.listFiles in production code
      by replacing it with our own implementation.
    
    (cherry picked from commit f6306265e)
    
    Change-Id: I1ecb6143d5840fc12b35a641e1bc73c40bba3676
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15083
    Reviewed-by: Michael Blow <mb...@apache.org>
    Reviewed-by: Hussain Towaileb <hu...@gmail.com>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Tested-by: Michael Blow <mb...@apache.org>
---
 .../PersistentLocalResourceRepository.java         | 48 ++++------------------
 .../java/org/apache/hyracks/api/util/IoUtil.java   | 36 ++++++++++++++++
 2 files changed, 45 insertions(+), 39 deletions(-)

diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 145be86..48b5b8e 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -57,7 +57,6 @@ import org.apache.asterix.common.storage.ResourceStorageStats;
 import org.apache.asterix.common.utils.StorageConstants;
 import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.filefilter.IOFileFilter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -93,41 +92,10 @@ public class PersistentLocalResourceRepository implements ILocalResourceReposito
     private static final FilenameFilter MASK_FILES_FILTER =
             (dir, name) -> name.startsWith(StorageConstants.MASK_FILE_PREFIX);
     private static final int MAX_CACHED_RESOURCES = 1000;
-    private static final IOFileFilter METADATA_FILES_FILTER = new IOFileFilter() {
-        @Override
-        public boolean accept(File file) {
-            return file.getName().equals(StorageConstants.METADATA_FILE_NAME);
-        }
-
-        @Override
-        public boolean accept(File dir, String name) {
-            return false;
-        }
-    };
-
-    private static final IOFileFilter METADATA_MASK_FILES_FILTER = new IOFileFilter() {
-        @Override
-        public boolean accept(File file) {
-            return file.getName().equals(METADATA_FILE_MASK_NAME);
-        }
-
-        @Override
-        public boolean accept(File dir, String name) {
-            return false;
-        }
-    };
-
-    private static final IOFileFilter ALL_DIR_FILTER = new IOFileFilter() {
-        @Override
-        public boolean accept(File file) {
-            return true;
-        }
-
-        @Override
-        public boolean accept(File dir, String name) {
-            return true;
-        }
-    };
+    private static final FilenameFilter METADATA_FILES_FILTER =
+            (dir, name) -> name.equals(StorageConstants.METADATA_FILE_NAME);
+    private static final FilenameFilter METADATA_MASK_FILES_FILTER =
+            (dir, name) -> name.equals(METADATA_FILE_MASK_NAME);
 
     // Finals
     private final IIOManager ioManager;
@@ -257,7 +225,10 @@ public class PersistentLocalResourceRepository implements ILocalResourceReposito
             throws HyracksDataException {
         Map<Long, LocalResource> resourcesMap = new HashMap<>();
         for (Path root : storageRoots) {
-            final Collection<File> files = FileUtils.listFiles(root.toFile(), METADATA_FILES_FILTER, ALL_DIR_FILTER);
+            if (!Files.exists(root) || !Files.isDirectory(root)) {
+                continue;
+            }
+            final Collection<File> files = IoUtil.getMatchingFiles(root, METADATA_FILES_FILTER);
             try {
                 for (File file : files) {
                     final LocalResource localResource = readLocalResource(file);
@@ -461,8 +432,7 @@ public class PersistentLocalResourceRepository implements ILocalResourceReposito
 
     public void deleteCorruptedResources() throws HyracksDataException {
         for (Path root : storageRoots) {
-            final Collection<File> metadataMaskFiles =
-                    FileUtils.listFiles(root.toFile(), METADATA_MASK_FILES_FILTER, ALL_DIR_FILTER);
+            final Collection<File> metadataMaskFiles = IoUtil.getMatchingFiles(root, METADATA_MASK_FILES_FILTER);
             for (File metadataMaskFile : metadataMaskFiles) {
                 final File resourceFile = new File(metadataMaskFile.getParent(), METADATA_FILE_NAME);
                 if (resourceFile.exists()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java
index 09ecb15..825fdd6 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/IoUtil.java
@@ -19,11 +19,16 @@
 package org.apache.hyracks.api.util;
 
 import java.io.File;
+import java.io.FileFilter;
 import java.io.FileNotFoundException;
+import java.io.FilenameFilter;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Objects;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.exceptions.ErrorCode;
@@ -132,4 +137,35 @@ public class IoUtil {
         return files;
     }
 
+    /**
+     * Gets a collection of files matching {@code filter} by searching {@code root} directory and
+     * all of its subdirectories
+     *
+     * @param root
+     * @param filter
+     * @return a collection of matching files
+     */
+    public static Collection<File> getMatchingFiles(Path root, FilenameFilter filter) {
+        if (!Files.isDirectory(root)) {
+            throw new IllegalArgumentException("Parameter 'root' is not a directory: " + root);
+        }
+        Objects.requireNonNull(filter);
+        Collection<File> files = new ArrayList<>();
+        FileFilter dirOrMatchingFileFilter = file -> file.isDirectory() || filter.accept(file, file.getName());
+        collectDirFiles(root.toFile(), dirOrMatchingFileFilter, files);
+        return files;
+    }
+
+    private static void collectDirFiles(File dir, FileFilter filter, Collection<File> files) {
+        File[] matchingFiles = dir.listFiles(filter);
+        if (matchingFiles != null) {
+            for (File file : matchingFiles) {
+                if (file.isDirectory()) {
+                    collectDirFiles(file, filter, files);
+                } else {
+                    files.add(file);
+                }
+            }
+        }
+    }
 }

[asterixdb] 11/12: [ASTERIXDB-3007][TEST] Fix PlanVerifierTestBase

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit e8c2042d775666653f87229e952e3db4227678e0
Author: Dmitry Lychagin <dm...@couchbase.com>
AuthorDate: Wed Feb 2 08:42:11 2022 -0800

    [ASTERIXDB-3007][TEST] Fix PlanVerifierTestBase
    
    - user model changes: no
    - storage format changes: no
    - interface changes: no
    
    Details:
    - Fix illegal query plan created by PlanVerifierTestBase
    
    Change-Id: Ib0ab584bd5ff71596d84f769aa8584e9223698ec
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15125
    Reviewed-by: Michael Blow <mb...@apache.org>
    Tested-by: Michael Blow <mb...@apache.org>
---
 .../hyracks/algebricks/core/algebra/plan/PlanVerifierTestBase.java    | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/test/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanVerifierTestBase.java b/hyracks-fullstack/algebricks/algebricks-core/src/test/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanVerifierTestBase.java
index 7bfbaa9..65d6460 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/test/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanVerifierTestBase.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/test/java/org/apache/hyracks/algebricks/core/algebra/plan/PlanVerifierTestBase.java
@@ -92,7 +92,9 @@ public abstract class PlanVerifierTestBase {
     Mutable<ILogicalOperator> createSamplePlan1() {
         AssignOperator op1 = newAssign(newVar(), newMutable(ConstantExpression.TRUE));
 
-        SubplanOperator op2 = new SubplanOperator(newAssign(newVar(), newMutable(ConstantExpression.TRUE)));
+        AssignOperator r1 = newAssign(newVar(), newMutable(ConstantExpression.TRUE));
+        r1.getInputs().add(new MutableObject<>(new EmptyTupleSourceOperator()));
+        SubplanOperator op2 = new SubplanOperator(r1);
         op1.getInputs().add(newMutable(op2));
 
         InnerJoinOperator op3 = new InnerJoinOperator(newMutable(ConstantExpression.TRUE));

[asterixdb] 04/12: Merge release 0.9.7

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 2582481322742d8c06303340ec051ad1dd57345f
Author: Ian Maxon <ia...@maxons.email>
AuthorDate: Thu Jan 27 11:49:12 2022 -0800

    Merge release 0.9.7
    
    (cherry picked from commit 4ba929313)
    
    Change-Id:If97ac8dad8df435c9a0c15ef4d7a7ef12bb3acde
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/15043
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Michael Blow <mb...@apache.org>
---
 asterixdb/asterix-active/pom.xml                                    | 2 +-
 asterixdb/asterix-algebra/pom.xml                                   | 2 +-
 asterixdb/asterix-app/pom.xml                                       | 2 +-
 asterixdb/asterix-benchmark/pom.xml                                 | 2 +-
 asterixdb/asterix-client-helper/pom.xml                             | 2 +-
 asterixdb/asterix-common/pom.xml                                    | 2 +-
 asterixdb/asterix-coverage/pom.xml                                  | 2 +-
 asterixdb/asterix-dashboard/pom.xml                                 | 2 +-
 asterixdb/asterix-doc/pom.xml                                       | 2 +-
 asterixdb/asterix-docker/pom.xml                                    | 2 +-
 asterixdb/asterix-examples/pom.xml                                  | 2 +-
 asterixdb/asterix-external-data/pom.xml                             | 2 +-
 asterixdb/asterix-fuzzyjoin/pom.xml                                 | 2 +-
 asterixdb/asterix-geo/pom.xml                                       | 2 +-
 asterixdb/asterix-lang-common/pom.xml                               | 2 +-
 asterixdb/asterix-lang-sqlpp/pom.xml                                | 2 +-
 asterixdb/asterix-license/pom.xml                                   | 2 +-
 .../asterix-grammar-extension-maven-plugin/pom.xml                  | 2 +-
 .../asterix-test-datagenerator-maven-plugin/pom.xml                 | 2 +-
 .../asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml      | 2 +-
 asterixdb/asterix-maven-plugins/pom.xml                             | 2 +-
 .../record-manager-generator-maven-plugin/pom.xml                   | 2 +-
 asterixdb/asterix-metadata/pom.xml                                  | 2 +-
 asterixdb/asterix-om/pom.xml                                        | 2 +-
 asterixdb/asterix-replication/pom.xml                               | 2 +-
 asterixdb/asterix-runtime/pom.xml                                   | 2 +-
 asterixdb/asterix-server/pom.xml                                    | 2 +-
 asterixdb/asterix-spidersilk/pom.xml                                | 2 +-
 asterixdb/asterix-test-framework/pom.xml                            | 2 +-
 asterixdb/asterix-tools/pom.xml                                     | 2 +-
 asterixdb/asterix-transactions/pom.xml                              | 2 +-
 asterixdb/pom.xml                                                   | 6 +++---
 hyracks-fullstack/algebricks/algebricks-common/pom.xml              | 2 +-
 hyracks-fullstack/algebricks/algebricks-compiler/pom.xml            | 2 +-
 hyracks-fullstack/algebricks/algebricks-core/pom.xml                | 2 +-
 hyracks-fullstack/algebricks/algebricks-data/pom.xml                | 2 +-
 hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml            | 2 +-
 hyracks-fullstack/algebricks/algebricks-runtime/pom.xml             | 2 +-
 hyracks-fullstack/algebricks/algebricks-tests/pom.xml               | 2 +-
 hyracks-fullstack/algebricks/pom.xml                                | 2 +-
 hyracks-fullstack/hyracks-fullstack-license/pom.xml                 | 2 +-
 hyracks-fullstack/hyracks/hyracks-api/pom.xml                       | 2 +-
 hyracks-fullstack/hyracks/hyracks-client/pom.xml                    | 2 +-
 hyracks-fullstack/hyracks/hyracks-comm/pom.xml                      | 2 +-
 .../hyracks/hyracks-control/hyracks-control-cc/pom.xml              | 2 +-
 .../hyracks/hyracks-control/hyracks-control-common/pom.xml          | 2 +-
 .../hyracks/hyracks-control/hyracks-control-nc/pom.xml              | 2 +-
 .../hyracks/hyracks-control/hyracks-nc-service/pom.xml              | 2 +-
 hyracks-fullstack/hyracks/hyracks-control/pom.xml                   | 2 +-
 hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml     | 2 +-
 hyracks-fullstack/hyracks/hyracks-data/pom.xml                      | 2 +-
 hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml           | 2 +-
 hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml              | 2 +-
 hyracks-fullstack/hyracks/hyracks-dist/pom.xml                      | 2 +-
 hyracks-fullstack/hyracks/hyracks-documentation/pom.xml             | 2 +-
 .../hyracks/hyracks-examples/btree-example/btreeclient/pom.xml      | 2 +-
 .../hyracks/hyracks-examples/btree-example/btreehelper/pom.xml      | 2 +-
 .../hyracks/hyracks-examples/btree-example/btreeserver/pom.xml      | 2 +-
 hyracks-fullstack/hyracks/hyracks-examples/btree-example/pom.xml    | 2 +-
 .../hyracks/hyracks-examples/hyracks-integration-tests/pom.xml      | 2 +-
 .../hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml          | 2 +-
 hyracks-fullstack/hyracks/hyracks-examples/pom.xml                  | 2 +-
 hyracks-fullstack/hyracks/hyracks-examples/text-example/pom.xml     | 2 +-
 .../hyracks/hyracks-examples/text-example/textclient/pom.xml        | 2 +-
 .../hyracks/hyracks-examples/text-example/texthelper/pom.xml        | 2 +-
 .../hyracks/hyracks-examples/text-example/textserver/pom.xml        | 2 +-
 hyracks-fullstack/hyracks/hyracks-examples/tpch-example/pom.xml     | 2 +-
 .../hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml        | 2 +-
 .../hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml        | 2 +-
 hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml                      | 2 +-
 hyracks-fullstack/hyracks/hyracks-http/pom.xml                      | 2 +-
 hyracks-fullstack/hyracks/hyracks-ipc/pom.xml                       | 2 +-
 .../hyracks-virtualcluster-maven-plugin/pom.xml                     | 2 +-
 .../hyracks/hyracks-maven-plugins/license-automation-plugin/pom.xml | 2 +-
 hyracks-fullstack/hyracks/hyracks-maven-plugins/pom.xml             | 2 +-
 hyracks-fullstack/hyracks/hyracks-net/pom.xml                       | 2 +-
 hyracks-fullstack/hyracks/hyracks-server/pom.xml                    | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml    | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml          | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml         | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml      | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml     | 2 +-
 .../hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml            | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml      | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml          | 2 +-
 hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml            | 2 +-
 hyracks-fullstack/hyracks/hyracks-test-support/pom.xml              | 2 +-
 .../hyracks/hyracks-tests/hyracks-dataflow-common-test/pom.xml      | 2 +-
 .../hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml       | 2 +-
 .../hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml     | 2 +-
 .../hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml | 2 +-
 .../hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml        | 2 +-
 .../hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml | 2 +-
 .../hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml | 2 +-
 .../hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml     | 2 +-
 .../hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml       | 2 +-
 hyracks-fullstack/hyracks/hyracks-tests/pom.xml                     | 2 +-
 hyracks-fullstack/hyracks/hyracks-util/pom.xml                      | 2 +-
 hyracks-fullstack/hyracks/pom.xml                                   | 2 +-
 hyracks-fullstack/pom.xml                                           | 2 +-
 pom.xml                                                             | 2 +-
 101 files changed, 103 insertions(+), 103 deletions(-)

diff --git a/asterixdb/asterix-active/pom.xml b/asterixdb/asterix-active/pom.xml
index d47078a..971534f 100644
--- a/asterixdb/asterix-active/pom.xml
+++ b/asterixdb/asterix-active/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-active</artifactId>
   <properties>
diff --git a/asterixdb/asterix-algebra/pom.xml b/asterixdb/asterix-algebra/pom.xml
index eaa3859..7a32f42 100644
--- a/asterixdb/asterix-algebra/pom.xml
+++ b/asterixdb/asterix-algebra/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-algebra</artifactId>
 
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index 5b8b9e4..51ede69 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-app</artifactId>
   <licenses>
diff --git a/asterixdb/asterix-benchmark/pom.xml b/asterixdb/asterix-benchmark/pom.xml
index 6801276..cffee2e 100644
--- a/asterixdb/asterix-benchmark/pom.xml
+++ b/asterixdb/asterix-benchmark/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-benchmark</artifactId>
 
diff --git a/asterixdb/asterix-client-helper/pom.xml b/asterixdb/asterix-client-helper/pom.xml
index 4f3bfb6..32f0722 100644
--- a/asterixdb/asterix-client-helper/pom.xml
+++ b/asterixdb/asterix-client-helper/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <properties>
     <root.dir>${basedir}/..</root.dir>
diff --git a/asterixdb/asterix-common/pom.xml b/asterixdb/asterix-common/pom.xml
index 460ef69..e76fb5b 100644
--- a/asterixdb/asterix-common/pom.xml
+++ b/asterixdb/asterix-common/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-common</artifactId>
   <licenses>
diff --git a/asterixdb/asterix-coverage/pom.xml b/asterixdb/asterix-coverage/pom.xml
index 716f200..19b73b9 100644
--- a/asterixdb/asterix-coverage/pom.xml
+++ b/asterixdb/asterix-coverage/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/asterixdb/asterix-dashboard/pom.xml b/asterixdb/asterix-dashboard/pom.xml
index 27a8164..9d962f8 100644
--- a/asterixdb/asterix-dashboard/pom.xml
+++ b/asterixdb/asterix-dashboard/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <artifactId>asterix-dashboard</artifactId>
diff --git a/asterixdb/asterix-doc/pom.xml b/asterixdb/asterix-doc/pom.xml
index a402d35..d0825d4 100644
--- a/asterixdb/asterix-doc/pom.xml
+++ b/asterixdb/asterix-doc/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <url>http://asterixdb.apache.org/</url>
diff --git a/asterixdb/asterix-docker/pom.xml b/asterixdb/asterix-docker/pom.xml
index df603c8..6c54337 100644
--- a/asterixdb/asterix-docker/pom.xml
+++ b/asterixdb/asterix-docker/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-docker</artifactId>
 
diff --git a/asterixdb/asterix-examples/pom.xml b/asterixdb/asterix-examples/pom.xml
index 23efed4..7812be0 100644
--- a/asterixdb/asterix-examples/pom.xml
+++ b/asterixdb/asterix-examples/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-examples</artifactId>
 
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index b8a1206..a5ce71d 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/asterixdb/asterix-fuzzyjoin/pom.xml b/asterixdb/asterix-fuzzyjoin/pom.xml
index 66ae271..65ffa19 100644
--- a/asterixdb/asterix-fuzzyjoin/pom.xml
+++ b/asterixdb/asterix-fuzzyjoin/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-fuzzyjoin</artifactId>
 
diff --git a/asterixdb/asterix-geo/pom.xml b/asterixdb/asterix-geo/pom.xml
index 4c26fb3..667db8e 100644
--- a/asterixdb/asterix-geo/pom.xml
+++ b/asterixdb/asterix-geo/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-geo</artifactId>
 
diff --git a/asterixdb/asterix-lang-common/pom.xml b/asterixdb/asterix-lang-common/pom.xml
index cf2f741..e58125c 100644
--- a/asterixdb/asterix-lang-common/pom.xml
+++ b/asterixdb/asterix-lang-common/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/asterixdb/asterix-lang-sqlpp/pom.xml b/asterixdb/asterix-lang-sqlpp/pom.xml
index 2cc1b4c..0248ed7 100644
--- a/asterixdb/asterix-lang-sqlpp/pom.xml
+++ b/asterixdb/asterix-lang-sqlpp/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/asterixdb/asterix-license/pom.xml b/asterixdb/asterix-license/pom.xml
index 47ad49e..3a6f1c3 100644
--- a/asterixdb/asterix-license/pom.xml
+++ b/asterixdb/asterix-license/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml
index 07c66fb..cd00397 100644
--- a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml
+++ b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>asterix-maven-plugins</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-grammar-extension-maven-plugin</artifactId>
   <packaging>maven-plugin</packaging>
diff --git a/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/pom.xml b/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/pom.xml
index 5e9a607..88aaf2b 100644
--- a/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/pom.xml
+++ b/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <artifactId>asterix-maven-plugins</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
index 780ef82..dccb9d7 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>asterix-maven-plugins</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <packaging>maven-plugin</packaging>
diff --git a/asterixdb/asterix-maven-plugins/pom.xml b/asterixdb/asterix-maven-plugins/pom.xml
index c7a463f..fd15ac5 100644
--- a/asterixdb/asterix-maven-plugins/pom.xml
+++ b/asterixdb/asterix-maven-plugins/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml b/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml
index 540f49b..ab1b13a 100644
--- a/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml
+++ b/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>asterix-maven-plugins</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <packaging>maven-plugin</packaging>
diff --git a/asterixdb/asterix-metadata/pom.xml b/asterixdb/asterix-metadata/pom.xml
index 9cdca0d..61507d1 100644
--- a/asterixdb/asterix-metadata/pom.xml
+++ b/asterixdb/asterix-metadata/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-metadata</artifactId>
   <properties>
diff --git a/asterixdb/asterix-om/pom.xml b/asterixdb/asterix-om/pom.xml
index cf03569..0fed772 100644
--- a/asterixdb/asterix-om/pom.xml
+++ b/asterixdb/asterix-om/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-om</artifactId>
 
diff --git a/asterixdb/asterix-replication/pom.xml b/asterixdb/asterix-replication/pom.xml
index 0a45c33..7e9b4f8 100644
--- a/asterixdb/asterix-replication/pom.xml
+++ b/asterixdb/asterix-replication/pom.xml
@@ -3,7 +3,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-replication</artifactId>
   <licenses>
diff --git a/asterixdb/asterix-runtime/pom.xml b/asterixdb/asterix-runtime/pom.xml
index 8cdffbe..22958ac 100644
--- a/asterixdb/asterix-runtime/pom.xml
+++ b/asterixdb/asterix-runtime/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-runtime</artifactId>
   <properties>
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index b4878dc..1c7208e 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/asterixdb/asterix-spidersilk/pom.xml b/asterixdb/asterix-spidersilk/pom.xml
index c834acb..656501f 100644
--- a/asterixdb/asterix-spidersilk/pom.xml
+++ b/asterixdb/asterix-spidersilk/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.asterix</groupId>
     <artifactId>apache-asterixdb</artifactId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/asterixdb/asterix-test-framework/pom.xml b/asterixdb/asterix-test-framework/pom.xml
index 1b893ec..c150fae 100644
--- a/asterixdb/asterix-test-framework/pom.xml
+++ b/asterixdb/asterix-test-framework/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-test-framework</artifactId>
 
diff --git a/asterixdb/asterix-tools/pom.xml b/asterixdb/asterix-tools/pom.xml
index c20acb8..2924fc6 100644
--- a/asterixdb/asterix-tools/pom.xml
+++ b/asterixdb/asterix-tools/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-tools</artifactId>
 
diff --git a/asterixdb/asterix-transactions/pom.xml b/asterixdb/asterix-transactions/pom.xml
index 3bb1c11..2749096 100644
--- a/asterixdb/asterix-transactions/pom.xml
+++ b/asterixdb/asterix-transactions/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>apache-asterixdb</artifactId>
     <groupId>org.apache.asterix</groupId>
-    <version>0.9.7-SNAPSHOT</version>
+    <version>0.9.8-SNAPSHOT</version>
   </parent>
   <artifactId>asterix-transactions</artifactId>
   <licenses>
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 529b6a5..09c5f4a 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -21,7 +21,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.asterix</groupId>
   <artifactId>apache-asterixdb</artifactId>
-  <version>0.9.7-SNAPSHOT</version>
+  <version>0.9.8-SNAPSHOT</version>
   <packaging>pom</packaging>
   <url>${implementation.url}</url>
 
@@ -82,8 +82,8 @@
     <skip-npm-touch.stage>none</skip-npm-touch.stage>
 
     <!-- Versions under dependencymanagement or used in many projects via properties -->
-    <algebricks.version>0.3.7-SNAPSHOT</algebricks.version>
-    <hyracks.version>0.3.7-SNAPSHOT</hyracks.version>
+    <algebricks.version>0.3.8-SNAPSHOT</algebricks.version>
+    <hyracks.version>0.3.8-SNAPSHOT</hyracks.version>
     <hadoop.version>3.3.1</hadoop.version>
     <jacoco.version>0.7.6.201602180812</jacoco.version>
     <log4j.version>2.17.1</log4j.version>
diff --git a/hyracks-fullstack/algebricks/algebricks-common/pom.xml b/hyracks-fullstack/algebricks/algebricks-common/pom.xml
index d288ad8..e657319 100644
--- a/hyracks-fullstack/algebricks/algebricks-common/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-common/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml b/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
index b7c153c..fbe9dbc 100644
--- a/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/algebricks/algebricks-core/pom.xml b/hyracks-fullstack/algebricks/algebricks-core/pom.xml
index 425b6b2..9c81540 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-core/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/algebricks/algebricks-data/pom.xml b/hyracks-fullstack/algebricks/algebricks-data/pom.xml
index 77f9367..f48c59ce 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-data/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml b/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
index 3b4cc1a..b0ea2fc 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml b/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
index 586f240..fb8afca 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
index 9f689e3..d11df8a 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>algebricks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/algebricks/pom.xml b/hyracks-fullstack/algebricks/pom.xml
index c961c0c..498581f 100644
--- a/hyracks-fullstack/algebricks/pom.xml
+++ b/hyracks-fullstack/algebricks/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>apache-hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks-fullstack-license/pom.xml b/hyracks-fullstack/hyracks-fullstack-license/pom.xml
index 50f0a0f..7dd97f9 100644
--- a/hyracks-fullstack/hyracks-fullstack-license/pom.xml
+++ b/hyracks-fullstack/hyracks-fullstack-license/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>apache-hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/pom.xml b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
index 905d396..fe8fb24 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-client/pom.xml b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
index 895cee2..409da1f 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/pom.xml b/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
index f9a5060..f73060f 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index 2a26b47..57e22e7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-control</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
index 2a5db06..65152a9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-control</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index 5898622..083d268 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-control</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
index 2048509..c1ee58a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>hyracks-control</artifactId>
     <groupId>org.apache.hyracks</groupId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/pom.xml
index e8fb251..bf8bb52 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml
index 48f3b33..dfc894f 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-data</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <properties>
     <root.dir>${basedir}/../../..</root.dir>
diff --git a/hyracks-fullstack/hyracks/hyracks-data/pom.xml b/hyracks-fullstack/hyracks/hyracks-data/pom.xml
index 0be145a..3d6f62f 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-data/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
index f2ab957..274cad7 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index ea9b704..ddb729f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-dist/pom.xml b/hyracks-fullstack/hyracks/hyracks-dist/pom.xml
index ceeea81..efed967 100644
--- a/hyracks-fullstack/hyracks/hyracks-dist/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dist/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>hyracks</artifactId>
     <groupId>org.apache.hyracks</groupId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-documentation/pom.xml b/hyracks-fullstack/hyracks/hyracks-documentation/pom.xml
index ecb80a3..624639c 100644
--- a/hyracks-fullstack/hyracks/hyracks-documentation/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-documentation/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
index c5f2c23..81a3dfc 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks.examples</groupId>
     <artifactId>btree-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
index 126ee54..7735cd7 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks.examples</groupId>
     <artifactId>btree-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
index 72919ca..75f4efe 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks.examples</groupId>
     <artifactId>btree-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/pom.xml
index 47525d0..942f980 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/pom.xml
@@ -27,7 +27,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-examples</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index 0bb7b66..84ba707 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-examples</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
index cd55b48..d2242c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-examples</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/pom.xml
index ca9d937..3e3bfe9 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/pom.xml
index a03ab9b..0135b1a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-examples</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
index ef07b62..57ba143 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>text-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml
index dc450a6..223e408 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>text-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
index 1a276ca..922dfa2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>text-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/pom.xml
index b7d1754..dadaef7 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-examples</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
index ec7d353..77eedd0 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>tpch-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
index 143d0c8..c408096 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>tpch-example</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml
index 301b45c..985515c 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-http/pom.xml b/hyracks-fullstack/hyracks/hyracks-http/pom.xml
index 18aec36..466a447 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-http/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <artifactId>hyracks-http</artifactId>
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml b/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
index 29481e8..caf3f8f 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml b/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
index 0d5e4a3..fc9d0fd 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-maven-plugins</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/pom.xml b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/pom.xml
index 1a2c2b6..16bcc7c 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-maven-plugins</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <packaging>maven-plugin</packaging>
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/pom.xml b/hyracks-fullstack/hyracks/hyracks-maven-plugins/pom.xml
index dde3f05..373aca3 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-net/pom.xml b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
index 9baecb0..506eae7 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/pom.xml b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
index b76e81c..12a723f 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml
index 41bd766..29f1dbf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml
index 732ede2..88b84aa 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
index 8dde311..82937cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml
index 711932e..deafec3 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
index 78c4cf0..7226f3b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
index 98e4e6d..3a58d74 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <artifactId>hyracks</artifactId>
     <groupId>org.apache.hyracks</groupId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
index 01b478b..7d3eff1 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml
index 46b21e9..0c92622 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
index 99056a4..a1c4b02 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <licenses>
     <license>
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
index a0c5aa4..8e4b23f 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-dataflow-common-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-dataflow-common-test/pom.xml
index efbfd87..06381a9 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-dataflow-common-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-dataflow-common-test/pom.xml
@@ -25,7 +25,7 @@
     <parent>
         <groupId>org.apache.hyracks</groupId>
         <artifactId>hyracks-tests</artifactId>
-        <version>0.3.7-SNAPSHOT</version>
+        <version>0.3.8-SNAPSHOT</version>
     </parent>
 
     <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
index ce2dde7..f061079 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index 68ccad6..dc93d8b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <properties>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
index 0aa4ed3..2e882ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
index 55183f7..b0ea9ab 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
index 79cfa61..10f1705 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <artifactId>hyracks-tests</artifactId>
     <groupId>org.apache.hyracks</groupId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
index 0df9b8a..4629123 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index 942d424..3462ec2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
index 29c0648..53808b9 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
   <build>
     <plugins>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/pom.xml
index 19f87b2..00c7949 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <licenses>
diff --git a/hyracks-fullstack/hyracks/hyracks-util/pom.xml b/hyracks-fullstack/hyracks/hyracks-util/pom.xml
index f908ee6..7653ad9 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-util/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <artifactId>hyracks</artifactId>
     <groupId>org.apache.hyracks</groupId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <modelVersion>4.0.0</modelVersion>
diff --git a/hyracks-fullstack/hyracks/pom.xml b/hyracks-fullstack/hyracks/pom.xml
index d3fd833..443e8c0 100644
--- a/hyracks-fullstack/hyracks/pom.xml
+++ b/hyracks-fullstack/hyracks/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.hyracks</groupId>
     <artifactId>apache-hyracks</artifactId>
-    <version>0.3.7-SNAPSHOT</version>
+    <version>0.3.8-SNAPSHOT</version>
   </parent>
 
   <build>
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index e3e8bf4..bb17d0f 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -21,7 +21,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hyracks</groupId>
   <artifactId>apache-hyracks</artifactId>
-  <version>0.3.7-SNAPSHOT</version>
+  <version>0.3.8-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hyracks-ecosystem-full-stack</name>
   <url>${implementation.url}</url>
diff --git a/pom.xml b/pom.xml
index 1e7a240..eac0b88 100644
--- a/pom.xml
+++ b/pom.xml
@@ -19,7 +19,7 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <artifactId>apache-asterixdb-fullstack</artifactId>
-  <version>0.9.7-SNAPSHOT</version>
+  <version>0.9.8-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hyracks-asterix</name>
 

[asterixdb] 08/12: Merge branch 'gerrit/stabilization-5949a1cb71'

Posted by mb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 4ae6e5f3059297847fba8c98dcc47836c763d420
Merge: fa0ddb6 594eb45
Author: Michael Blow <mi...@couchbase.com>
AuthorDate: Tue Feb 1 17:32:15 2022 -0500

    Merge branch 'gerrit/stabilization-5949a1cb71'
    
    Change-Id: I46c5d89fc13ce9b8f3aec55ece7b5281920c8759

 .../PersistentLocalResourceRepository.java         | 48 ++++------------------
 .../java/org/apache/hyracks/api/util/IoUtil.java   | 36 ++++++++++++++++
 2 files changed, 45 insertions(+), 39 deletions(-)