Posted to notifications@asterixdb.apache.org by AsterixDB Code Review <do...@asterix-gerrit.ics.uci.edu> on 2022/04/22 18:52:47 UTC

Change in asterixdb[neo]: Merge branch 'gerrit/cheshire-cat'

From Michael Blow <mb...@apache.org>:

Michael Blow has uploaded this change for review. ( https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/16163 )


Change subject: Merge branch 'gerrit/cheshire-cat'
......................................................................

Merge branch 'gerrit/cheshire-cat'

Change-Id: I1b62507d2d2c85220bda15056fe2d2ff4dbb67c6
---
M asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
M asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
M asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
M asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
M asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
M asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
M asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
M asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
M asterixdb/pom.xml
M hyracks-fullstack/pom.xml
11 files changed, 0 insertions(+), 358 deletions(-)



  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb refs/changes/63/16163/1
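
  For local review, the same ref can also be fetched into a scratch branch instead of pulled directly
  into the current one; a minimal sketch using standard git commands (the branch name review/16163 is
  only an illustrative choice, not part of the change):

  git fetch ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb refs/changes/63/16163/1
  git checkout -b review/16163 FETCH_HEAD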

diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
index 5b9c377..e51a539 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
@@ -163,7 +163,6 @@
                     throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc,
                             LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(partitioningExpr)));
                 }
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 if (forPrimaryKey) {
                     boolean nullable = KeyFieldTypeUtil.chooseSource(keySourceIndicators, i, recType, metaRecType)
                             .isSubFieldNullable(partitioningExpr);
@@ -175,14 +174,6 @@
                     }
                 } else {
                     fieldType = TypeComputeUtils.getActualType(fieldType);
-=======
-                boolean nullable = KeyFieldTypeUtil.chooseSource(keySourceIndicators, i, recType, metaRecType)
-                        .isSubFieldNullable(partitioningExpr);
-                if (nullable) {
-                    // key field is nullable
-                    throw new CompilationException(ErrorCode.COMPILATION_PRIMARY_KEY_CANNOT_BE_NULLABLE, sourceLoc,
-                            LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(partitioningExpr)));
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 }
                 switch (fieldType.getTypeTag()) {
                     case TINYINT:
@@ -201,12 +192,8 @@
                     case DAYTIMEDURATION:
                         break;
                     case UNION:
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                         throw new CompilationException(ErrorCode.COMPILATION_KEY_CANNOT_BE_NULLABLE, sourceLoc,
                                 keyKindDisplayName,
-=======
-                        throw new CompilationException(ErrorCode.COMPILATION_PRIMARY_KEY_CANNOT_BE_NULLABLE, sourceLoc,
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                                 LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(partitioningExpr)));
                     default:
                         throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_KEY_TYPE, sourceLoc,
@@ -232,7 +219,6 @@
      */
     public static void validateIndexFieldType(IndexType indexType, IAType fieldType, List<String> displayFieldName,
             SourceLocation sourceLoc) throws AlgebricksException {
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         switch (indexType) {
             case ARRAY:
             case BTREE:
@@ -258,23 +244,7 @@
                                         + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
                                         + "' which is of type " + fieldType.getTypeTag()
                                         + " cannot be indexed using the BTree index.");
-=======
-        List<IAType> fieldTypes =
-                KeyFieldTypeUtil.getKeyTypes(recType, metaRecType, keyFieldNames, keySourceIndicators);
-        int pos = 0;
-        boolean openFieldCompositeIdx = false;
-        for (IAType fieldType : fieldTypes) {
-            List<String> fieldName = keyFieldNames.get(pos);
-            if (fieldType == null) {
-                fieldType = keyFieldTypes.get(pos);
-                if (keyFieldTypes.get(pos) == BuiltinType.AMISSING) {
-                    throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "A field with this name  \""
-                                    + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                    + "\" could not be found.");
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 }
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 break;
             case RTREE:
                 switch (fieldType.getTypeTag()) {
@@ -295,121 +265,6 @@
                 break;
             case LENGTH_PARTITIONED_NGRAM_INVIX:
                 if (fieldType.getTypeTag() != ATypeTag.STRING) {
-=======
-            } else if (openFieldCompositeIdx) {
-                throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                        "A closed field \"" + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                + "\" could be only in a prefix part of the composite index, containing opened field.");
-            }
-            if (keyFieldTypes.get(pos) != BuiltinType.AMISSING
-                    && fieldType.getTypeTag() != keyFieldTypes.get(pos).getTypeTag()) {
-                throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                        "A field \"" + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName)) + "\" is "
-                                + "already defined with the type \"" + fieldType + "\"");
-            }
-            switch (indexType) {
-                case BTREE:
-                    switch (fieldType.getTypeTag()) {
-                        case TINYINT:
-                        case SMALLINT:
-                        case INTEGER:
-                        case BIGINT:
-                        case FLOAT:
-                        case DOUBLE:
-                        case STRING:
-                        case BINARY:
-                        case DATE:
-                        case TIME:
-                        case DATETIME:
-                        case UNION:
-                        case UUID:
-                        case YEARMONTHDURATION:
-                        case DAYTIMEDURATION:
-                            break;
-                        default:
-                            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                    "The field '"
-                                            + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                            + "' which is of type " + fieldType.getTypeTag()
-                                            + " cannot be indexed using the BTree index.");
-                    }
-                    break;
-                case RTREE:
-                    switch (fieldType.getTypeTag()) {
-                        case POINT:
-                        case LINE:
-                        case RECTANGLE:
-                        case CIRCLE:
-                        case POLYGON:
-                        case GEOMETRY:
-                        case UNION:
-                            break;
-                        default:
-                            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                    "The field '"
-                                            + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                            + "' which is of type " + fieldType.getTypeTag()
-                                            + " cannot be indexed using the RTree index.");
-                    }
-                    break;
-                case LENGTH_PARTITIONED_NGRAM_INVIX:
-                    switch (fieldType.getTypeTag()) {
-                        case STRING:
-                        case UNION:
-                            break;
-                        default:
-                            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                    "The field '"
-                                            + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                            + "' which is of type " + fieldType.getTypeTag()
-                                            + " cannot be indexed using the Length Partitioned N-Gram index.");
-                    }
-                    break;
-                case LENGTH_PARTITIONED_WORD_INVIX:
-                    switch (fieldType.getTypeTag()) {
-                        case STRING:
-                        case MULTISET:
-                        case ARRAY:
-                        case UNION:
-                            break;
-                        default:
-                            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                    "The field '"
-                                            + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                            + "' which is of type " + fieldType.getTypeTag()
-                                            + " cannot be indexed using the Length Partitioned Keyword index.");
-                    }
-                    break;
-                case SINGLE_PARTITION_NGRAM_INVIX:
-                    switch (fieldType.getTypeTag()) {
-                        case STRING:
-                        case UNION:
-                            break;
-                        default:
-                            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                    "The field '"
-                                            + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                            + "' which is of type " + fieldType.getTypeTag()
-                                            + " cannot be indexed using the N-Gram index.");
-                    }
-                    break;
-                case SINGLE_PARTITION_WORD_INVIX:
-                    switch (fieldType.getTypeTag()) {
-                        case STRING:
-                        case MULTISET:
-                        case ARRAY:
-                        case UNION:
-                            break;
-                        default:
-                            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                    "The field '"
-                                            + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldName))
-                                            + "' which is of type " + fieldType.getTypeTag()
-                                            + " cannot be indexed using the Keyword index.");
-                    }
-                    break;
-                default:
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                             "The field '" + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
                                     + "' which is of type " + fieldType.getTypeTag()
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 4e53ffc..ebc8097 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -43,10 +43,7 @@
 import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
 import java.util.concurrent.locks.ReentrantReadWriteLock;
-=======
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 import java.util.stream.Collectors;
 
 import org.apache.asterix.active.ActivityState;
@@ -207,13 +204,10 @@
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.types.TypeSignature;
 import org.apache.asterix.om.utils.RecordUtil;
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
 import org.apache.asterix.runtime.fulltext.AbstractFullTextFilterDescriptor;
 import org.apache.asterix.runtime.fulltext.FullTextConfigDescriptor;
 import org.apache.asterix.runtime.fulltext.IFullTextFilterDescriptor;
 import org.apache.asterix.runtime.fulltext.StopwordsFullTextFilterDescriptor;
-=======
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 import org.apache.asterix.transaction.management.service.transaction.DatasetIdFactory;
 import org.apache.asterix.translator.AbstractLangTranslator;
 import org.apache.asterix.translator.ClientRequest;
@@ -265,13 +259,9 @@
 import org.apache.hyracks.control.common.controllers.CCConfig;
 import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor.DropOption;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
 import org.apache.hyracks.storage.am.lsm.invertedindex.fulltext.TokenizerCategory;
 import org.apache.hyracks.util.LogRedactionUtil;
 import org.apache.hyracks.util.OptionalBoolean;
-=======
-import org.apache.hyracks.util.LogRedactionUtil;
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -1185,46 +1175,10 @@
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, indexedElement.getSourceLocation(),
                                 "Invalid index element");
                     }
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                     inputTypePrime = sourceRecordType;
                     inputTypeNullable = inputTypeMissable = false;
-=======
-                    if (stmtCreateIndex.isEnforced() && !fieldExpr.second.isUnknownable()) {
-                        throw new AsterixException(ErrorCode.INDEX_ILLEGAL_ENFORCED_NON_OPTIONAL, sourceLoc,
-                                LogRedactionUtil.userData(String.valueOf(fieldExpr.first)));
-                    }
-                    // don't allow creating an enforced index on a closed-type field, fields that
-                    // are part of schema.
-                    // get the field type, if it's not null, then the field is closed-type
-                    if (stmtCreateIndex.isEnforced()
-                            && subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size())) != null) {
-                        throw new AsterixException(ErrorCode.INDEX_ILLEGAL_ENFORCED_ON_CLOSED_FIELD, sourceLoc,
-                                LogRedactionUtil.userData(String.valueOf(fieldExpr.first)));
-                    }
-                    if (!isOpen) {
-                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "Typed index on '"
-                                        + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(fieldExpr.first))
-                                        + "' " + "field could be created only for open datatype");
-                    }
-                    if (stmtCreateIndex.hasMetaField()) {
-                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "Typed open index can only be created on the record part");
-                    }
-                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(dataverseName, indexName,
-                            fieldExpr.second.getType(), dataverseName, mdTxnCtx);
-                    TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
-                    fieldType = typeMap.get(typeSignature);
-                    overridesFieldTypes = true;
-                }
-                if (fieldType == null) {
-                    throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc,
-                            fieldExpr.second == null ? LogRedactionUtil.userData(String.valueOf(fieldExpr.first))
-                                    : String.valueOf(fieldExpr.second));
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 }
 
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 // at this point 'inputTypePrime' is either a record, or if we had unnest then it could also be anything else.
                 List<IAType> fieldTypes = new ArrayList<>(projectCount);
                 for (int i = 0; i < projectCount; i++) {
@@ -1327,14 +1281,6 @@
                     IAType fieldType =
                             KeyFieldTypeUtil.makeUnknownableType(fieldTypePrime, fieldTypeNullable, fieldTypeMissable);
                     fieldTypes.add(fieldType);
-=======
-                // try to add the key & its source to the set of keys, if key couldn't be added,
-                // there is a duplicate
-                if (!indexKeysSet
-                        .add(new Pair<>(fieldExpr.first, stmtCreateIndex.getFieldSourceIndicators().get(keyIndex)))) {
-                    throw new AsterixException(ErrorCode.INDEX_ILLEGAL_REPETITIVE_FIELD, sourceLoc,
-                            LogRedactionUtil.userData(String.valueOf(fieldExpr.first)));
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 }
 
                 // Try to add the key & its source to the set of keys for duplicate detection.
@@ -1669,7 +1615,6 @@
                 List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(
                         metadataProvider.getMetadataTxnContext(), index.getDataverseName(), index.getDatasetName());
                 for (Index existingIndex : indexes) {
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                     if (!existingIndex.isEnforced()) {
                         continue;
                     }
@@ -1701,18 +1646,6 @@
                                         + existingIndex.getIndexName() + " on field(s) '"
                                         + LogRedactionUtil.userData(fieldNames) + "' is already defined with type(s) '"
                                         + StringUtils.join(existingIndexKeyFieldTypes, ',') + "'");
-=======
-                    if (existingIndex.getKeyFieldNames().equals(index.getKeyFieldNames())
-                            && !existingIndex.getKeyFieldTypes().equals(index.getKeyFieldTypes())
-                            && existingIndex.isEnforced()) {
-                        String fieldNames = index.getKeyFieldNames().stream().map(RecordUtil::toFullyQualifiedName)
-                                .collect(Collectors.joining(","));
-                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "Cannot create index " + index.getIndexName() + " , enforced index "
-                                        + existingIndex.getIndexName() + " on field(s) '"
-                                        + LogRedactionUtil.userData(fieldNames) + "' is already defined with type(s) '"
-                                        + StringUtils.join(existingIndex.getKeyFieldTypes(), ',') + "'");
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                     }
                 }
             }
diff --git a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
index c8c68ea..49e4d74 100644
--- a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -530,61 +530,37 @@
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_1">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the BTree index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the BTree index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_2">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'age' which is of type integer cannot be indexed using the RTree index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'age' which is of type integer cannot be indexed using the RTree index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_3">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_4">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_5">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_6">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
   </test-group>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index e1d8883..1267995 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -7320,11 +7320,7 @@
       <test-case FilePath="open-index-enforced/error-checking">
         <compilation-unit name="index-on-closed-type">
           <output-dir compare="Text">index-on-closed-type</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
           <expected-error>ASX1014: Field 'value' is not found (in line 33, at column 34)</expected-error>
-=======
-          <expected-error>Typed index on 'value' field could be created only for open datatype</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
         </compilation-unit>
       </test-case>
       <test-case FilePath="open-index-enforced/error-checking">
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
index 5bc103e..aa37ebb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
@@ -83,11 +83,7 @@
                 indexDetails.getKeyFieldNames().get(0), recordType).first;
         if (spatialType == null) {
             throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND,
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                     LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(indexDetails.getKeyFieldNames().get(0))));
-=======
-                    LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(index.getKeyFieldNames().get(0))));
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
         }
         List<List<String>> primaryKeyFields = dataset.getPrimaryKeys();
         int numPrimaryKeys = primaryKeyFields.size();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
index 5888f49..6e62262 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
@@ -49,14 +49,11 @@
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
 import org.apache.hyracks.algebricks.common.utils.Triple;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.SourceLocation;
-=======
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 import org.apache.hyracks.util.LogRedactionUtil;
 
 /**
@@ -402,7 +399,6 @@
             if (!index.isSecondaryIndex() || !index.getIndexDetails().isOverridingKeyFieldTypes()) {
                 continue;
             }
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
             switch (Index.IndexCategory.of(index.getIndexType())) {
                 case VALUE:
                     enforcedRecordType = appendValueIndexType(enforcedRecordType,
@@ -419,100 +415,6 @@
                 default:
                     throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE,
                             String.valueOf(index.getIndexType()));
-=======
-            if (index.hasMetaFields()) {
-                throw new AlgebricksException("Indexing an open field is only supported on the record part");
-            }
-            for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
-                // keeps track of a record type and a field name in that record type
-                Deque<Pair<IAType, String>> nestedTypeStack = new ArrayDeque<>();
-                List<String> splits = index.getKeyFieldNames().get(i);
-                IAType nestedFieldType = enforcedRecordType;
-                boolean openRecords = false;
-                String bridgeName = nestedFieldType.getTypeName();
-                int j;
-                // enforcedRecordType must always be/stay as ARecordType
-                validateRecord(enforcedRecordType);
-                // build the stack for the enforced type, stack of a mixture of ARecord and AUnion(ARecord) types
-                // try to build up to the last record field, e.g. for a.b.c.d.e, build up to and including "d"
-                for (j = 1; j < splits.size(); j++) {
-                    nestedTypeStack.push(new Pair<>(nestedFieldType, splits.get(j - 1)));
-                    bridgeName = nestedFieldType.getTypeName();
-                    subFieldName = splits.subList(0, j);
-                    nestedFieldType = ((ARecordType) enforcedRecordType).getSubFieldType(subFieldName);
-                    if (nestedFieldType == null) {
-                        openRecords = true;
-                        break;
-                    }
-                    // nestedFieldType (i.e. nested record field) must be either ARecordType or AUnion(ARecordType)
-                    validateNestedRecord(nestedFieldType, subFieldName);
-                }
-                if (openRecords) {
-                    // create the smallest record
-                    enforcedRecordType = new ARecordType(splits.get(splits.size() - 2),
-                            new String[] { splits.get(splits.size() - 1) },
-                            new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) }, true);
-                    // create the open part of the nested field
-                    for (int k = splits.size() - 3; k > (j - 2); k--) {
-                        enforcedRecordType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
-                                new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) }, true);
-                    }
-                    // bridge the gap. Update the parent type to include the new optional field, e.g. c.d.e
-                    Pair<IAType, String> gapPair = nestedTypeStack.pop();
-                    ARecordType parent = (ARecordType) TypeComputeUtils.getActualType(gapPair.first);
-
-                    // parent type must be "open" to allow inclusion of the non-declared field
-                    IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
-                            new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) });
-                    enforcedRecordType = new ARecordType(bridgeName,
-                            ArrayUtils.addAll(parent.getFieldNames(), enforcedRecordType.getTypeName()),
-                            parentFieldTypes, true);
-                    // make nullable/missable if the original parent was nullable/missable
-                    enforcedRecordType = keepUnknown(gapPair.first, (ARecordType) enforcedRecordType);
-                } else {
-                    // schema is closed all the way to the field. Enforced fields are either null or strongly typed
-                    // e.g. nestedFieldType = a.b.c.d
-                    ARecordType lastNestedRecord = (ARecordType) TypeComputeUtils.getActualType(nestedFieldType);
-                    Map<String, IAType> recordNameTypesMap = TypeUtil.createRecordNameTypeMap(lastNestedRecord);
-                    // if a an enforced field already exists and the type is correct
-                    IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
-                    if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
-                            && ((AUnionType) enforcedFieldType).isUnknownableType()) {
-                        enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
-                    }
-                    if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
-                            index.getKeyFieldTypes().get(i).getTypeTag())) {
-                        throw new AsterixException(ErrorCode.COMPILATION_ERROR,
-                                "Cannot enforce field '"
-                                        + LogRedactionUtil.userData(String.join(".", index.getKeyFieldNames().get(i)))
-                                        + "' to have type " + index.getKeyFieldTypes().get(i));
-                    }
-                    if (enforcedFieldType == null) {
-                        recordNameTypesMap.put(splits.get(splits.size() - 1),
-                                AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
-                    }
-                    enforcedRecordType = new ARecordType(lastNestedRecord.getTypeName(),
-                            recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
-                            recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
-                            lastNestedRecord.isOpen());
-                    // make nullable/missable if the original nestedFieldType was nullable/missable
-                    enforcedRecordType = keepUnknown(nestedFieldType, (ARecordType) enforcedRecordType);
-                }
-
-                // Create the enforced type for the nested fields in the schema, from the ground up
-                if (!nestedTypeStack.isEmpty()) {
-                    while (!nestedTypeStack.isEmpty()) {
-                        Pair<IAType, String> nestedType = nestedTypeStack.pop();
-                        ARecordType nestedRecType = (ARecordType) TypeComputeUtils.getActualType(nestedType.first);
-                        IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
-                        nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedType.second)] = enforcedRecordType;
-                        enforcedRecordType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
-                                nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
-                        // make nullable/missable if the original nestedRecType was nullable/missable
-                        enforcedRecordType = keepUnknown(nestedType.first, (ARecordType) enforcedRecordType);
-                    }
-                }
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
             }
         }
 
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
index 430bc08..27da909 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
@@ -94,11 +94,7 @@
         syncFuture = threadExecutor.submit(() -> {
             try {
                 Thread.currentThread().setName("Replica " + id.toString() + " Synchronizer");
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 new ReplicaSynchronizer(appCtx, this).sync(register, deltaRecovery);
-=======
-                new ReplicaSynchronizer(appCtx, this).sync();
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 setStatus(IN_SYNC);
             } catch (Exception e) {
                 LOGGER.error(() -> "Failed to sync replica " + this, e);
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
index ac97262..92e4989 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
@@ -55,14 +55,11 @@
             final File localFile = ioManager.resolve(file).getFile();
             if (localFile.exists()) {
                 Files.delete(localFile.toPath());
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 ResourceReference replicaRes = ResourceReference.of(localFile.getAbsolutePath());
                 if (replicaRes.isMetadataResource()) {
                     ((PersistentLocalResourceRepository) appCtx.getLocalResourceRepository())
                             .invalidateResource(replicaRes.getRelativePath().toString());
                 }
-=======
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 LOGGER.debug(() -> "Deleted file: " + localFile.getAbsolutePath());
             } else {
                 LOGGER.warn(() -> "Requested to delete a non-existing file: " + localFile.getAbsolutePath());
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 0f88553..dc14c7c 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -88,17 +88,12 @@
     <jacoco.version>0.7.6.201602180812</jacoco.version>
     <log4j.version>2.17.1</log4j.version>
     <awsjavasdk.version>2.17.116</awsjavasdk.version>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
     <parquet.version>1.12.0</parquet.version>
     <hadoop-awsjavasdk.version>1.12.109</hadoop-awsjavasdk.version>
     <azureblobjavasdk.version>12.14.2</azureblobjavasdk.version>
     <azuredatalakejavasdk.version>12.7.2</azuredatalakejavasdk.version>
     <gcsjavasdk.version>2.3.0</gcsjavasdk.version>
     <hadoop-azuresdk.version>8.6.6</hadoop-azuresdk.version>
-=======
-    <azurejavasdk.version>12.12.0</azurejavasdk.version>
-    <parquet.version>1.8.2</parquet.version>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 
     <implementation.title>Apache AsterixDB - ${project.name}</implementation.title>
     <implementation.url>https://asterixdb.apache.org/</implementation.url>
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 1df09d7..9abd300 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -74,11 +74,7 @@
     <jacoco.version>0.7.6.201602180812</jacoco.version>
     <log4j.version>2.17.1</log4j.version>
     <snappy.version>1.1.8.4</snappy.version>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
-    <jackson.version>2.13.1</jackson.version>
-=======
     <jackson.version>2.13.2</jackson.version>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 
     <implementation.title>Apache Hyracks and Algebricks - ${project.name}</implementation.title>
     <implementation.url>https://asterixdb.apache.org/</implementation.url>

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/16163
To unsubscribe, or for help writing mail filters, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-Project: asterixdb
Gerrit-Branch: neo
Gerrit-Change-Id: I1b62507d2d2c85220bda15056fe2d2ff4dbb67c6
Gerrit-Change-Number: 16163
Gerrit-PatchSet: 1
Gerrit-Owner: Michael Blow <mb...@apache.org>
Gerrit-MessageType: newchange

                 for (Index existingIndex : indexes) {
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                     if (!existingIndex.isEnforced()) {
                         continue;
                     }
@@ -1701,18 +1646,6 @@
                                         + existingIndex.getIndexName() + " on field(s) '"
                                         + LogRedactionUtil.userData(fieldNames) + "' is already defined with type(s) '"
                                         + StringUtils.join(existingIndexKeyFieldTypes, ',') + "'");
-=======
-                    if (existingIndex.getKeyFieldNames().equals(index.getKeyFieldNames())
-                            && !existingIndex.getKeyFieldTypes().equals(index.getKeyFieldTypes())
-                            && existingIndex.isEnforced()) {
-                        String fieldNames = index.getKeyFieldNames().stream().map(RecordUtil::toFullyQualifiedName)
-                                .collect(Collectors.joining(","));
-                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "Cannot create index " + index.getIndexName() + " , enforced index "
-                                        + existingIndex.getIndexName() + " on field(s) '"
-                                        + LogRedactionUtil.userData(fieldNames) + "' is already defined with type(s) '"
-                                        + StringUtils.join(existingIndex.getKeyFieldTypes(), ',') + "'");
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                     }
                 }
             }
diff --git a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
index c8c68ea..49e4d74 100644
--- a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -530,61 +530,37 @@
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_1">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the BTree index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the BTree index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_2">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'age' which is of type integer cannot be indexed using the RTree index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'age' which is of type integer cannot be indexed using the RTree index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_3">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_4">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_5">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_6">
         <output-dir compare="Text">none</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
         <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
-=======
-        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 1)</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
       </compilation-unit>
     </test-case>
   </test-group>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index e1d8883..1267995 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -7320,11 +7320,7 @@
       <test-case FilePath="open-index-enforced/error-checking">
         <compilation-unit name="index-on-closed-type">
           <output-dir compare="Text">index-on-closed-type</output-dir>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
           <expected-error>ASX1014: Field 'value' is not found (in line 33, at column 34)</expected-error>
-=======
-          <expected-error>Typed index on 'value' field could be created only for open datatype</expected-error>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
         </compilation-unit>
       </test-case>
       <test-case FilePath="open-index-enforced/error-checking">
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
index 5bc103e..aa37ebb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/RTreeResourceFactoryProvider.java
@@ -83,11 +83,7 @@
                 indexDetails.getKeyFieldNames().get(0), recordType).first;
         if (spatialType == null) {
             throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND,
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                     LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(indexDetails.getKeyFieldNames().get(0))));
-=======
-                    LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(index.getKeyFieldNames().get(0))));
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
         }
         List<List<String>> primaryKeyFields = dataset.getPrimaryKeys();
         int numPrimaryKeys = primaryKeyFields.size();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
index 5888f49..6e62262 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
@@ -49,14 +49,11 @@
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
 import org.apache.hyracks.algebricks.common.utils.Triple;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.SourceLocation;
-=======
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 import org.apache.hyracks.util.LogRedactionUtil;
 
 /**
@@ -402,7 +399,6 @@
             if (!index.isSecondaryIndex() || !index.getIndexDetails().isOverridingKeyFieldTypes()) {
                 continue;
             }
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
             switch (Index.IndexCategory.of(index.getIndexType())) {
                 case VALUE:
                     enforcedRecordType = appendValueIndexType(enforcedRecordType,
@@ -419,100 +415,6 @@
                 default:
                     throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE,
                             String.valueOf(index.getIndexType()));
-=======
-            if (index.hasMetaFields()) {
-                throw new AlgebricksException("Indexing an open field is only supported on the record part");
-            }
-            for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
-                // keeps track of a record type and a field name in that record type
-                Deque<Pair<IAType, String>> nestedTypeStack = new ArrayDeque<>();
-                List<String> splits = index.getKeyFieldNames().get(i);
-                IAType nestedFieldType = enforcedRecordType;
-                boolean openRecords = false;
-                String bridgeName = nestedFieldType.getTypeName();
-                int j;
-                // enforcedRecordType must always be/stay as ARecordType
-                validateRecord(enforcedRecordType);
-                // build the stack for the enforced type, stack of a mixture of ARecord and AUnion(ARecord) types
-                // try to build up to the last record field, e.g. for a.b.c.d.e, build up to and including "d"
-                for (j = 1; j < splits.size(); j++) {
-                    nestedTypeStack.push(new Pair<>(nestedFieldType, splits.get(j - 1)));
-                    bridgeName = nestedFieldType.getTypeName();
-                    subFieldName = splits.subList(0, j);
-                    nestedFieldType = ((ARecordType) enforcedRecordType).getSubFieldType(subFieldName);
-                    if (nestedFieldType == null) {
-                        openRecords = true;
-                        break;
-                    }
-                    // nestedFieldType (i.e. nested record field) must be either ARecordType or AUnion(ARecordType)
-                    validateNestedRecord(nestedFieldType, subFieldName);
-                }
-                if (openRecords) {
-                    // create the smallest record
-                    enforcedRecordType = new ARecordType(splits.get(splits.size() - 2),
-                            new String[] { splits.get(splits.size() - 1) },
-                            new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) }, true);
-                    // create the open part of the nested field
-                    for (int k = splits.size() - 3; k > (j - 2); k--) {
-                        enforcedRecordType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
-                                new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) }, true);
-                    }
-                    // bridge the gap. Update the parent type to include the new optional field, e.g. c.d.e
-                    Pair<IAType, String> gapPair = nestedTypeStack.pop();
-                    ARecordType parent = (ARecordType) TypeComputeUtils.getActualType(gapPair.first);
-
-                    // parent type must be "open" to allow inclusion of the non-declared field
-                    IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
-                            new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) });
-                    enforcedRecordType = new ARecordType(bridgeName,
-                            ArrayUtils.addAll(parent.getFieldNames(), enforcedRecordType.getTypeName()),
-                            parentFieldTypes, true);
-                    // make nullable/missable if the original parent was nullable/missable
-                    enforcedRecordType = keepUnknown(gapPair.first, (ARecordType) enforcedRecordType);
-                } else {
-                    // schema is closed all the way to the field. Enforced fields are either null or strongly typed
-                    // e.g. nestedFieldType = a.b.c.d
-                    ARecordType lastNestedRecord = (ARecordType) TypeComputeUtils.getActualType(nestedFieldType);
-                    Map<String, IAType> recordNameTypesMap = TypeUtil.createRecordNameTypeMap(lastNestedRecord);
-                    // if a an enforced field already exists and the type is correct
-                    IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
-                    if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
-                            && ((AUnionType) enforcedFieldType).isUnknownableType()) {
-                        enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
-                    }
-                    if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
-                            index.getKeyFieldTypes().get(i).getTypeTag())) {
-                        throw new AsterixException(ErrorCode.COMPILATION_ERROR,
-                                "Cannot enforce field '"
-                                        + LogRedactionUtil.userData(String.join(".", index.getKeyFieldNames().get(i)))
-                                        + "' to have type " + index.getKeyFieldTypes().get(i));
-                    }
-                    if (enforcedFieldType == null) {
-                        recordNameTypesMap.put(splits.get(splits.size() - 1),
-                                AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
-                    }
-                    enforcedRecordType = new ARecordType(lastNestedRecord.getTypeName(),
-                            recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
-                            recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
-                            lastNestedRecord.isOpen());
-                    // make nullable/missable if the original nestedFieldType was nullable/missable
-                    enforcedRecordType = keepUnknown(nestedFieldType, (ARecordType) enforcedRecordType);
-                }
-
-                // Create the enforced type for the nested fields in the schema, from the ground up
-                if (!nestedTypeStack.isEmpty()) {
-                    while (!nestedTypeStack.isEmpty()) {
-                        Pair<IAType, String> nestedType = nestedTypeStack.pop();
-                        ARecordType nestedRecType = (ARecordType) TypeComputeUtils.getActualType(nestedType.first);
-                        IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
-                        nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedType.second)] = enforcedRecordType;
-                        enforcedRecordType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
-                                nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
-                        // make nullable/missable if the original nestedRecType was nullable/missable
-                        enforcedRecordType = keepUnknown(nestedType.first, (ARecordType) enforcedRecordType);
-                    }
-                }
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
             }
         }
 
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
index 430bc08..27da909 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/api/PartitionReplica.java
@@ -94,11 +94,7 @@
         syncFuture = threadExecutor.submit(() -> {
             try {
                 Thread.currentThread().setName("Replica " + id.toString() + " Synchronizer");
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 new ReplicaSynchronizer(appCtx, this).sync(register, deltaRecovery);
-=======
-                new ReplicaSynchronizer(appCtx, this).sync();
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 setStatus(IN_SYNC);
             } catch (Exception e) {
                 LOGGER.error(() -> "Failed to sync replica " + this, e);
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
index ac97262..92e4989 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/DeleteFileTask.java
@@ -55,14 +55,11 @@
             final File localFile = ioManager.resolve(file).getFile();
             if (localFile.exists()) {
                 Files.delete(localFile.toPath());
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
                 ResourceReference replicaRes = ResourceReference.of(localFile.getAbsolutePath());
                 if (replicaRes.isMetadataResource()) {
                     ((PersistentLocalResourceRepository) appCtx.getLocalResourceRepository())
                             .invalidateResource(replicaRes.getRelativePath().toString());
                 }
-=======
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
                 LOGGER.debug(() -> "Deleted file: " + localFile.getAbsolutePath());
             } else {
                 LOGGER.warn(() -> "Requested to delete a non-existing file: " + localFile.getAbsolutePath());
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 0f88553..dc14c7c 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -88,17 +88,12 @@
     <jacoco.version>0.7.6.201602180812</jacoco.version>
     <log4j.version>2.17.1</log4j.version>
     <awsjavasdk.version>2.17.116</awsjavasdk.version>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
     <parquet.version>1.12.0</parquet.version>
     <hadoop-awsjavasdk.version>1.12.109</hadoop-awsjavasdk.version>
     <azureblobjavasdk.version>12.14.2</azureblobjavasdk.version>
     <azuredatalakejavasdk.version>12.7.2</azuredatalakejavasdk.version>
     <gcsjavasdk.version>2.3.0</gcsjavasdk.version>
     <hadoop-azuresdk.version>8.6.6</hadoop-azuresdk.version>
-=======
-    <azurejavasdk.version>12.12.0</azurejavasdk.version>
-    <parquet.version>1.8.2</parquet.version>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 
     <implementation.title>Apache AsterixDB - ${project.name}</implementation.title>
     <implementation.url>https://asterixdb.apache.org/</implementation.url>
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 1df09d7..9abd300 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -74,11 +74,7 @@
     <jacoco.version>0.7.6.201602180812</jacoco.version>
     <log4j.version>2.17.1</log4j.version>
     <snappy.version>1.1.8.4</snappy.version>
-<<<<<<< HEAD   (758f94 [NO ISSUE][OTH] Redact field name)
-    <jackson.version>2.13.1</jackson.version>
-=======
     <jackson.version>2.13.2</jackson.version>
->>>>>>> BRANCH (c573bc [NO ISSUE][NET] SSL Socket Fixes)
 
     <implementation.title>Apache Hyracks and Algebricks - ${project.name}</implementation.title>
     <implementation.url>https://asterixdb.apache.org/</implementation.url>

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/16163
To unsubscribe, or for help writing mail filters, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-Project: asterixdb
Gerrit-Branch: neo
Gerrit-Change-Id: I1b62507d2d2c85220bda15056fe2d2ff4dbb67c6
Gerrit-Change-Number: 16163
Gerrit-PatchSet: 1
Gerrit-Owner: Michael Blow <mb...@apache.org>
Gerrit-MessageType: newchange