Posted to commits@asterixdb.apache.org by al...@apache.org on 2021/04/07 12:47:56 UTC

[asterixdb] 15/25: [NO ISSUE][OTH] Add an identifier mapper to map identifiers

This is an automated email from the ASF dual-hosted git repository.

alsuliman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 7b2191a9c524eac57b066b8fee3e9ff149c2eca0
Author: Ali Alsuliman <al...@gmail.com>
AuthorDate: Wed Mar 31 23:30:47 2021 -0700

    [NO ISSUE][OTH] Add an identifier mapper to map identifiers
    
    - user model changes: no
    - storage format changes: no
    - interface changes: yes
    
    Details:
    Add an identifier mapper for identifiers such as "dataset" so that
    extensions can provide their own mapping for such identifiers.
    Error messages in the code should use the mapped identifier.
    
    - Add source location and parameters to IFormattedException
    - Add methods to get the source location and parameters to IFormattedException
    - Include the parameters of the warning message in the warning object
    
    Change-Id: I4bd0a602fddeb09a7dfca64ce35f5eee6f3c4777
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/9884
    Reviewed-by: Michael Blow <mb...@apache.org>
    Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
    Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
---
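A minimal sketch of how an extension could plug in its own identifier mapping, based on the IIdentifierMapper, IdentifierMappingUtil, and IdentifierUtil classes added below; the extension class name and the "collection" wording are illustrative assumptions, not part of this change:

    // Hypothetical extension bootstrap: remap the "dataset" identifier so that
    // error messages built through IdentifierUtil use extension-specific wording.
    import org.apache.asterix.common.api.IIdentifierMapper;
    import org.apache.asterix.common.utils.IdentifierMappingUtil;
    import org.apache.asterix.common.utils.IdentifierUtil;

    public class ExtensionIdentifierBootstrap {

        public static void installIdentifierMapper() {
            // "collection" is an assumed replacement term, for illustration only.
            IIdentifierMapper mapper = identifier ->
                    IdentifierUtil.DATASET.equals(identifier) ? "collection" : identifier;
            IdentifierMappingUtil.setMapper(mapper);
        }

        public static void main(String[] args) {
            installIdentifierMapper();
            // Messages built via IdentifierUtil.dataset() now say "collection".
            System.out.println("Cannot find " + IdentifierUtil.dataset() + " orders");
        }
    }
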
 .../operators/physical/InvertedIndexPOperator.java |  4 +-
 .../rules/MetaFunctionToMetaVariableRule.java      |  4 +-
 .../rules/SetAsterixPhysicalOperatorsRule.java     |  5 ++-
 .../asterix/translator/AbstractLangTranslator.java | 21 +++++++----
 .../translator/LangExpressionToPlanTranslator.java | 22 ++++++-----
 .../api/http/server/ConnectorApiServlet.java       |  7 +++-
 .../api/http/server/RebalanceApiServlet.java       |  5 ++-
 .../asterix/app/function/DatasetRewriter.java      |  6 ++-
 .../apache/asterix/app/function/FeedRewriter.java  |  5 ++-
 .../asterix/app/translator/QueryTranslator.java    | 43 ++++++++++++----------
 .../test/resources/runtimets/testsuite_sqlpp.xml   | 26 ++++++-------
 .../asterix/common/api/IIdentifierMapper.java      | 27 ++++++++++++++
 .../asterix/common/config/CompilerProperties.java  |  2 +-
 .../common/config/TransactionProperties.java       |  5 ++-
 .../asterix/common/exceptions/WarningUtil.java     |  4 +-
 .../common/utils/IdentifierMappingUtil.java        | 41 +++++++++++++++++++++
 .../asterix/common/utils/IdentifierUtil.java       | 34 +++++++++++++++++
 .../src/main/resources/asx_errormsg/en.properties  |  4 +-
 .../asterix-doc/src/main/markdown/sqlpp/5_error.md |  2 +-
 .../org/apache/asterix/metadata/MetadataNode.java  | 15 ++++----
 .../metadata/declared/MetadataManagerUtil.java     |  8 +++-
 .../metadata/declared/MetadataProvider.java        | 18 +++++----
 .../apache/asterix/metadata/entities/Function.java |  7 ++++
 .../asterix/metadata/feeds/FeedMetadataUtil.java   |  9 +++--
 .../apache/asterix/metadata/utils/DatasetUtil.java |  6 ++-
 .../InvertedIndexResourceFactoryProvider.java      |  5 ++-
 .../common/exceptions/AlgebricksException.java     |  2 +
 .../hyracks/api/exceptions/HyracksException.java   |  2 +
 .../api/exceptions/IFormattedException.java        | 11 ++++++
 .../org/apache/hyracks/api/exceptions/Warning.java | 27 ++++++++++++--
 30 files changed, 285 insertions(+), 92 deletions(-)

diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index 491911b..4ad7d3f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.algebra.operators.physical;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import org.apache.asterix.common.config.OptimizationConfUtil;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.declared.DataSourceId;
@@ -155,7 +157,7 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
                 dataset.getDataverseName(), dataset.getDatasetName(), indexName);
         if (secondaryIndex == null) {
             throw new AlgebricksException(
-                    "Code generation error: no index " + indexName + " for dataset " + datasetName);
+                    "Code generation error: no index " + indexName + " for " + dataset() + " " + datasetName);
         }
         IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
         RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
index b6dbbe7..aa2b8fd 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.optimizer.rules;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -254,7 +256,7 @@ class LogicalExpressionReferenceTransform implements ILogicalExpressionReference
         // the user query provides zero parameter for the meta function.
         if (variableRequired) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, expr.getSourceLocation(),
-                    "Cannot resolve ambiguous meta function call. There are more than one dataset choice!");
+                    "Cannot resolve ambiguous meta function call. There are more than one " + dataset() + " choice");
         }
         VariableReferenceExpression metaVarRef = new VariableReferenceExpression(metaVar);
         metaVarRef.setSourceLocation(expr.getSourceLocation());
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
index d466446..5ec1a7f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.optimizer.rules;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -265,7 +267,8 @@ public final class SetAsterixPhysicalOperatorsRule extends SetAlgebricksPhysical
             INodeDomain storageDomain = mp.findNodeDomain(dataset.getNodeGroupName());
             if (dsi == null) {
                 throw new CompilationException(ErrorCode.COMPILATION_ERROR, op.getSourceLocation(),
-                        "Could not find index " + jobGenParams.getIndexName() + " for dataset " + dataSourceId);
+                        "Could not find index " + jobGenParams.getIndexName() + " for " + dataset() + " "
+                                + dataSourceId);
             }
             IndexType indexType = jobGenParams.getIndexType();
             boolean requiresBroadcast = jobGenParams.getRequiresBroadcast();
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 58bdaca..6f07718 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -18,6 +18,9 @@
  */
 package org.apache.asterix.translator;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;
+
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Map;
@@ -57,6 +60,7 @@ import org.apache.asterix.lang.common.statement.UpsertStatement;
 import org.apache.asterix.metadata.dataset.hints.DatasetHints;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.utils.MetadataConstants;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -73,11 +77,12 @@ public abstract class AbstractLangTranslator {
 
     protected static final String INVALID_OPERATION_MESSAGE = "Invalid operation - %s";
 
-    protected static final String BAD_DATAVERSE_DML_MESSAGE = "%s operation is not permitted in dataverse %s";
+    protected static final String BAD_DATAVERSE_DML_MESSAGE = "%s operation is not permitted in " + dataverse() + " %s";
 
-    protected static final String BAD_DATAVERSE_DDL_MESSAGE = "Cannot %s dataverse: %s";
+    protected static final String BAD_DATAVERSE_DDL_MESSAGE = "Cannot %s " + dataverse() + ": %s";
 
-    protected static final String BAD_DATAVERSE_OBJECT_DDL_MESSAGE = "Cannot %s a %s belonging to the dataverse: %s";
+    protected static final String BAD_DATAVERSE_OBJECT_DDL_MESSAGE =
+            "Cannot %s a %s belonging to the " + dataverse() + ": %s";
 
     public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt)
             throws AlgebricksException {
@@ -210,7 +215,7 @@ public abstract class AbstractLangTranslator {
                 }
                 invalidOperation = isMetadataDataverse(dataverseName);
                 if (invalidOperation) {
-                    message = String.format(BAD_DATAVERSE_OBJECT_DDL_MESSAGE, "create", "dataset", dataverseName);
+                    message = String.format(BAD_DATAVERSE_OBJECT_DDL_MESSAGE, "create", dataset(), dataverseName);
                 }
 
                 if (!invalidOperation) {
@@ -221,9 +226,9 @@ public abstract class AbstractLangTranslator {
                             Pair<Boolean, String> validationResult =
                                     DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                             if (!validationResult.first) {
-                                errorMsgBuffer.append("Dataset: ").append(dsCreateStmt.getName().getValue())
-                                        .append(" error in processing hint: ").append(hint.getKey()).append(" ")
-                                        .append(validationResult.second);
+                                errorMsgBuffer.append(StringUtils.capitalize(dataset())).append(": ")
+                                        .append(dsCreateStmt.getName().getValue()).append(" error in processing hint: ")
+                                        .append(hint.getKey()).append(" ").append(validationResult.second);
                                 errorMsgBuffer.append(" \n");
                             }
                         }
@@ -242,7 +247,7 @@ public abstract class AbstractLangTranslator {
                 }
                 invalidOperation = isMetadataDataverse(dataverseName);
                 if (invalidOperation) {
-                    message = String.format(BAD_DATAVERSE_OBJECT_DDL_MESSAGE, "drop", "dataset", dataverseName);
+                    message = String.format(BAD_DATAVERSE_OBJECT_DDL_MESSAGE, "drop", dataset(), dataverseName);
                 }
                 break;
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index fd689a5..da07acb 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.translator;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -206,8 +208,8 @@ abstract class LangExpressionToPlanTranslator
                 validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName(), sourceLoc);
         List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
         if (dataset.hasMetaPart()) {
-            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    dataset.getDatasetName() + ": load dataset is not supported on Datasets with Meta records");
+            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, dataset.getDatasetName() + ": load "
+                    + dataset() + " is not supported on " + dataset() + "s with meta records");
         }
 
         LoadableDataSource lds;
@@ -430,8 +432,8 @@ abstract class LangExpressionToPlanTranslator
         SourceLocation sourceLoc = stmt.getSourceLocation();
         if (targetDatasource.getDataset().hasMetaPart()) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    targetDatasource.getDataset().getDatasetName()
-                            + ": delete from dataset is not supported on Datasets with Meta records");
+                    targetDatasource.getDataset().getDatasetName() + ": delete from " + dataset()
+                            + " is not supported on " + dataset() + "s with meta records");
         }
 
         List<String> filterField = DatasetUtil.getFilterField(targetDatasource.getDataset());
@@ -461,8 +463,8 @@ abstract class LangExpressionToPlanTranslator
         SourceLocation sourceLoc = stmt.getSourceLocation();
         if (!targetDatasource.getDataset().allow(topOp, DatasetUtil.OP_UPSERT)) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    targetDatasource.getDataset().getDatasetName()
-                            + ": upsert into dataset is not supported on Datasets with Meta records");
+                    targetDatasource.getDataset().getDatasetName() + ": upsert into " + dataset()
+                            + " is not supported on " + dataset() + "s with meta records");
         }
         ProjectOperator project = (ProjectOperator) topOp;
         CompiledUpsertStatement compiledUpsert = (CompiledUpsertStatement) stmt;
@@ -474,7 +476,7 @@ abstract class LangExpressionToPlanTranslator
         if (targetDatasource.getDataset().hasMetaPart()) {
             if (returnExpression != null) {
                 throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                        "Returning not allowed on datasets with Meta records");
+                        "Returning not allowed on " + dataset() + "s with meta records");
             }
             List<LogicalVariable> metaAndKeysVars;
             List<Mutable<ILogicalExpression>> metaAndKeysExprs;
@@ -585,8 +587,8 @@ abstract class LangExpressionToPlanTranslator
         SourceLocation sourceLoc = stmt.getSourceLocation();
         if (targetDatasource.getDataset().hasMetaPart()) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    targetDatasource.getDataset().getDatasetName()
-                            + ": insert into dataset is not supported on Datasets with Meta records");
+                    targetDatasource.getDataset().getDatasetName() + ": insert into " + dataset()
+                            + " is not supported on " + dataset() + "s with meta records");
         }
 
         List<String> filterField = DatasetUtil.getFilterField(targetDatasource.getDataset());
@@ -683,7 +685,7 @@ abstract class LangExpressionToPlanTranslator
         }
         if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    "Cannot write output to an external dataset.");
+                    "Cannot write output to an external " + dataset());
         }
         DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
         IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index 2fafcfc..353b849 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -19,6 +19,8 @@
 package org.apache.asterix.api.http.server;
 
 import static org.apache.asterix.api.http.server.ServletConstants.HYRACKS_CONNECTION_ATTR;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;
 
 import java.io.IOException;
 import java.io.PrintWriter;
@@ -34,6 +36,7 @@ import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.utils.FlushDatasetUtil;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.api.io.FileSplit;
@@ -98,8 +101,8 @@ public class ConnectorApiServlet extends AbstractServlet {
                 metadataProvider.setMetadataTxnContext(mdTxnCtx);
                 Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
                 if (dataset == null) {
-                    jsonResponse.put("error",
-                            "Dataset " + datasetName + " does not exist in " + "dataverse " + dataverseName);
+                    jsonResponse.put("error", StringUtils.capitalize(dataset()) + " " + datasetName
+                            + " does not exist in " + dataverse() + " " + dataverseName);
                     out.write(jsonResponse.toString());
                     out.flush();
                     return;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
index 8b04cc3..c4d664b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.api.http.server;
 
 import static org.apache.asterix.api.http.server.ServletConstants.HYRACKS_CONNECTION_ATTR;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
 
 import java.io.PrintWriter;
 import java.nio.charset.StandardCharsets;
@@ -121,13 +122,13 @@ public class RebalanceApiServlet extends AbstractServlet {
             // If a user gives parameter datasetName, she should give dataverseName as well.
             if (dataverseName == null && datasetName != null) {
                 sendResponse(response, HttpResponseStatus.BAD_REQUEST,
-                        "to rebalance a particular dataset, the parameter dataverseName must be given");
+                        "to rebalance a particular " + dataset() + ", the parameter dataverseName must be given");
                 return;
             }
 
             // Does not allow rebalancing a metadata dataset.
             if (MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName)) {
-                sendResponse(response, HttpResponseStatus.BAD_REQUEST, "cannot rebalance a metadata dataset");
+                sendResponse(response, HttpResponseStatus.BAD_REQUEST, "cannot rebalance a metadata " + dataset());
                 return;
             }
             // Schedules a rebalance task and wait for its completion.
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
index 974cd9e..0d20d51 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.app.function;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -67,7 +69,7 @@ public class DatasetRewriter implements IFunctionToDataSourceRewriter, IResultTy
         if (unnest.getPositionalVariable() != null) {
             // TODO remove this after enabling the support of positional variables in data scan
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, unnest.getSourceLocation(),
-                    "No positional variables are allowed over datasets.");
+                    "No positional variables are allowed over " + dataset() + "s");
         }
 
         MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
@@ -122,7 +124,7 @@ public class DatasetRewriter implements IFunctionToDataSourceRewriter, IResultTy
         IAType type = metadata.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
         if (type == null) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, datasetFnCall.getSourceLocation(),
-                    "No type for dataset " + dataset.getDatasetName());
+                    "No type for " + dataset() + " " + dataset.getDatasetName());
         }
         return type;
     }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
index 90374c3..b01ea65 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.app.function;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -129,7 +131,8 @@ public class FeedRewriter implements IFunctionToDataSourceRewriter, IResultTypeC
         if (dataset.hasMetaPart()) {
             String metaTypeName = FeedUtils.getFeedMetaTypeName(sourceFeed.getConfiguration());
             if (metaTypeName == null) {
-                throw new AlgebricksException("Feed to a dataset with metadata doesn't have meta type specified");
+                throw new AlgebricksException(
+                        "Feed to a " + dataset() + " with metadata doesn't have meta type specified");
             }
             metaType = (ARecordType) metadataProvider.findType(id.getDataverseName(), metaTypeName);
         }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index ab682c1..d8698f9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -18,6 +18,9 @@
  */
 package org.apache.asterix.app.translator;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
@@ -576,7 +579,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 (ILSMMergePolicyFactory) Class.forName(compactionPolicyFactoryClassName).newInstance();
         if (isExternalDataset && mergePolicyFactory.getName().compareTo("correlated-prefix") == 0) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    "The correlated-prefix merge policy cannot be used with external dataset.");
+                    "The correlated-prefix merge policy cannot be used with external " + dataset() + "s");
         }
         if (compactionPolicyProperties == null) {
             if (mergePolicyFactory.getName().compareTo("no-merge") != 0) {
@@ -813,8 +816,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                             TransactionState.COMMIT);
                     break;
                 default:
-                    throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "Unknown dataset type " + dsType);
+                    throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_DATASET_TYPE,
+                            dataset.getDatasetType().toString());
             }
 
             // #. initialize DatasetIdFactory if it is not initialized.
@@ -921,7 +924,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             SourceLocation sourceLoc) throws AlgebricksException {
         if (itemType.getTypeTag() != ATypeTag.OBJECT) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    String.format("Dataset %s has to be a record type.", isMetaItemType ? "meta type" : "type"));
+                    String.format(StringUtils.capitalize(dataset()) + " %s has to be a record type.",
+                            isMetaItemType ? "meta type" : "type"));
         }
     }
 
@@ -1162,13 +1166,13 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 // Check if the dataset is indexible
                 if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
-                                    + " Adapter can't be indexed");
+                            dataset() + " using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
+                                    + " adapter can't be indexed");
                 }
                 // Check if the name of the index is valid
                 if (!ExternalIndexingOperations.isValidIndexName(index.getDatasetName(), index.getIndexName())) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "external dataset index name is invalid");
+                            "external " + dataset() + " index name is invalid");
                 }
 
                 // Check if the files index exist
@@ -1209,7 +1213,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                             metadataProvider);
                     if (spec == null) {
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "Failed to create job spec for replicating Files Index For external dataset");
+                                "Failed to create job spec for replicating files index for external " + dataset());
                     }
                     filesIndexReplicated = true;
                     runJob(hcc, spec, jobFlags);
@@ -1457,7 +1461,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)
                 || dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                    dataverseName + " dataverse can't be dropped");
+                    dataverseName + " " + dataverse() + " can't be dropped");
         }
         lockUtil.dropDataverseBegin(lockManager, metadataProvider.getLocks(), dataverseName);
         try {
@@ -1824,7 +1828,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                     }
                 } else if (ExternalIndexingOperations.isFileIndex(index)) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "Dropping a dataset's files index is not allowed.");
+                            "Dropping " + dataset() + " files index is not allowed.");
                 }
                 ensureNonPrimaryIndexDrop(index, sourceLoc);
                 // #. prepare a job to drop the index in NC.
@@ -3044,8 +3048,9 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         ActiveEntityEventsListener listener =
                 (ActiveEntityEventsListener) activeNotificationHandler.getListener(feedId);
         if (listener != null && listener.getState() != ActivityState.STOPPED) {
-            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, "Feed " + feedId
-                    + " is currently active and connected to the following dataset(s) \n" + listener.toString());
+            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+                    "Feed " + feedId + " is currently active and connected to the following " + dataset() + "(s) \n"
+                            + listener.toString());
         } else if (listener != null) {
             listener.unregister();
         }
@@ -3071,7 +3076,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             if (feedPolicy == null) {
                 if (!stmtFeedPolicyDrop.getIfExists()) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "Unknown policy " + policyName + " in dataverse " + dataverseName);
+                            "Unknown policy " + policyName + " in " + dataverse() + " " + dataverseName);
                 }
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                 return;
@@ -3202,7 +3207,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                     feedName, datasetName);
             if (fc != null) {
                 throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                        "Feed" + feedName + " is already connected dataset " + datasetName);
+                        "Feed" + feedName + " is already connected to " + dataset() + " " + datasetName);
             }
             fc = new FeedConnection(dataverseName, feedName, datasetName, appliedFunctions, policyName, whereClauseBody,
                     outputType.getTypeName());
@@ -3286,7 +3291,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
             if (indexes.isEmpty()) {
                 throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                        "Cannot compact the extrenal dataset " + datasetName + " because it has no indexes");
+                        "Cannot compact the external " + dataset() + " " + datasetName + " because it has no indexes");
             }
             Dataverse dataverse =
                     MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
@@ -3599,14 +3604,14 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             // Dataset external ?
             if (ds.getDatasetType() != DatasetType.EXTERNAL) {
-                throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                        "dataset " + datasetName + " in dataverse " + dataverseName + " is not an external dataset");
+                throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, dataset() + " " + datasetName
+                        + " in " + dataverse() + " " + dataverseName + " is not an external " + dataset());
             }
             // Dataset has indexes ?
             indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
             if (indexes.isEmpty()) {
-                throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, "External dataset " + datasetName
-                        + " in dataverse " + dataverseName + " doesn't have any index");
+                throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, "External " + dataset() + " "
+                        + datasetName + " in " + dataverse() + " " + dataverseName + " doesn't have any index");
             }
 
             // Record transaction time
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index ce440c1..19e0965 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -4855,7 +4855,7 @@
     <test-case FilePath="dml">
       <compilation-unit name="upsert-dataset-with-meta">
         <output-dir compare="Text">upsert-dataset-with-meta</output-dir>
-        <expected-error>upsert into dataset is not supported on Datasets with Meta records</expected-error>
+        <expected-error>upsert into dataset is not supported on datasets with meta record</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
@@ -4881,13 +4881,13 @@
     <test-case FilePath="dml">
       <compilation-unit name="delete-dataset-with-meta">
         <output-dir compare="Text">delete-dataset-with-meta</output-dir>
-        <expected-error>delete from dataset is not supported on Datasets with Meta records</expected-error>
+        <expected-error>delete from dataset is not supported on datasets with meta records</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
       <compilation-unit name="insert-dataset-with-meta">
         <output-dir compare="Text">insert-dataset-with-meta</output-dir>
-        <expected-error>insert into dataset is not supported on Datasets with Meta records</expected-error>
+        <expected-error>insert into dataset is not supported on datasets with meta records</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
@@ -6692,7 +6692,7 @@
         <output-dir compare="Text">partition-by-nonexistent-field</output-dir>
         <expected-error>Field "id" is not found</expected-error>
         <expected-error>Cannot find dataset with name testds in dataverse test</expected-error>
-        <expected-error>Cannot find dataset testds in dataverse test nor an alias with name testds!</expected-error>
+        <expected-error>Cannot find dataset testds in dataverse test nor an alias with name testds</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="misc">
@@ -11968,26 +11968,26 @@
     <test-case FilePath="user-defined-functions">
       <compilation-unit name="bad-function-ddl-1">
         <output-dir compare="Text">bad-function-ddl-1</output-dir>
-        <expected-error>Cannot find dataset TweetMessages in dataverse experiments nor an alias with name TweetMessages!</expected-error>
-        <expected-error>Cannot find dataset TweetMessages2 in dataverse experiments2 nor an alias with name TweetMessages2!</expected-error>
+        <expected-error>Cannot find dataset TweetMessages in dataverse experiments nor an alias with name TweetMessages</expected-error>
+        <expected-error>Cannot find dataset TweetMessages2 in dataverse experiments2 nor an alias with name TweetMessages2</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="user-defined-functions">
       <compilation-unit name="bad-function-ddl-2">
         <output-dir compare="Text">bad-function-ddl-2</output-dir>
-        <expected-error>Cannot find dataset TweetMessages in dataverse experiments2 nor an alias with name TweetMessages!</expected-error>
+        <expected-error>Cannot find dataset TweetMessages in dataverse experiments2 nor an alias with name TweetMessages</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="user-defined-functions">
       <compilation-unit name="bad-function-ddl-3">
         <output-dir compare="Text">bad-function-ddl-3</output-dir>
-        <expected-error>Cannot find dataset TweetMessages in dataverse experiments nor an alias with name TweetMessages!</expected-error>
+        <expected-error>Cannot find dataset TweetMessages in dataverse experiments nor an alias with name TweetMessages</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="user-defined-functions">
       <compilation-unit name="bad-function-ddl-4">
         <output-dir compare="Text">bad-function-ddl-4</output-dir>
-        <expected-error>Cannot find dataset TweetMessages in dataverse experients nor an alias with name TweetMessages!</expected-error>
+        <expected-error>Cannot find dataset TweetMessages in dataverse experients nor an alias with name TweetMessages</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="user-defined-functions">
@@ -12006,13 +12006,13 @@
     <test-case FilePath="user-defined-functions">
       <compilation-unit name="bad-function-ddl-7">
         <output-dir compare="Text">bad-function-ddl-7</output-dir>
-        <expected-error>Cannot find dataset TweetMessaes in dataverse experiments nor an alias with name TweetMessaes!</expected-error>
+        <expected-error>Cannot find dataset TweetMessaes in dataverse experiments nor an alias with name TweetMessaes</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="user-defined-functions">
       <compilation-unit name="bad-function-ddl-8">
         <output-dir compare="Text">bad-function-ddl-8</output-dir>
-        <expected-error>Cannot find dataset TweetMessaes in dataverse experiments nor an alias with name TweetMessaes!</expected-error>
+        <expected-error>Cannot find dataset TweetMessaes in dataverse experiments nor an alias with name TweetMessaes</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="user-defined-functions">
@@ -12500,7 +12500,7 @@
     <test-case FilePath="load">
       <compilation-unit name="dataset-with-meta">
         <output-dir compare="Text">dataset-with-meta</output-dir>
-        <expected-error>ASX1079: Compilation error: DatasetWithMeta: load dataset is not supported on Datasets with Meta records (in line 27, at column 1)</expected-error>
+        <expected-error>ASX1079: Compilation error: DatasetWithMeta: load dataset is not supported on datasets with meta records (in line 27, at column 1)</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="load">
@@ -12868,7 +12868,7 @@
     <test-case FilePath="meta">
       <compilation-unit name="query_dataset_with_meta_failure">
         <output-dir compare="Text">query_dataset_with_meta_failure</output-dir>
-        <expected-error>ASX1079: Compilation error: Cannot resolve ambiguous meta function call. There are more than one dataset choice! (in line 24, at column 7)</expected-error>
+        <expected-error>ASX1079: Compilation error: Cannot resolve ambiguous meta function call. There are more than one dataset choice (in line 24, at column 7)</expected-error>
       </compilation-unit>
     </test-case>
   </test-group>
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
new file mode 100644
index 0000000..8687239
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.api;
+
+@FunctionalInterface
+public interface IIdentifierMapper {
+
+    String map(String identifier);
+
+}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
index 39142e5..866d183 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
@@ -83,7 +83,7 @@ public class CompilerProperties extends AbstractProperties {
         COMPILER_EXTERNAL_FIELD_PUSHDOWN(
                 BOOLEAN,
                 AlgebricksConfig.EXTERNAL_FIELD_PUSHDOWN_DEFAULT,
-                "Enable pushdown of field accesses to the external dataset data-scan operator"),
+                "Enable pushdown of field accesses to the external data-scan operator"),
         COMPILER_SUBPLAN_MERGE(
                 BOOLEAN,
                 AlgebricksConfig.SUBPLAN_MERGE_DEFAULT,
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
index 050a493..d67e9a6 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.common.config;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
 import static org.apache.hyracks.control.common.config.OptionTypes.BOOLEAN;
 import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER_BYTE_UNIT;
 import static org.apache.hyracks.control.common.config.OptionTypes.LONG_BYTE_UNIT;
@@ -41,7 +42,7 @@ public class TransactionProperties extends AbstractProperties {
         TXN_DATASET_CHECKPOINT_INTERVAL(
                 POSITIVE_INTEGER,
                 (int) TimeUnit.MINUTES.toSeconds(60),
-                "The interval (in seconds) after which a dataset is considered idle and persisted to disk"),
+                "The interval (in seconds) after which a " + dataset() + " is considered idle and persisted to disk"),
         TXN_LOG_BUFFER_NUMPAGES(POSITIVE_INTEGER, 8, "The number of pages in the transaction log tail"),
         TXN_LOG_BUFFER_PAGESIZE(
                 INTEGER_BYTE_UNIT,
@@ -65,7 +66,7 @@ public class TransactionProperties extends AbstractProperties {
         TXN_LOCK_ESCALATIONTHRESHOLD(
                 NONNEGATIVE_INTEGER,
                 1000,
-                "The maximum number of entity locks to obtain before upgrading to a dataset lock"),
+                "The maximum number of entity locks to obtain before upgrading to a " + dataset() + " lock"),
         TXN_LOCK_SHRINKTIMER(
                 POSITIVE_INTEGER,
                 5000,
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/WarningUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/WarningUtil.java
index 9dffa94..7b5e4a0 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/WarningUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/WarningUtil.java
@@ -28,7 +28,9 @@ public class WarningUtil {
     private WarningUtil() {
     }
 
-    /** Merges the warnings from the collection argument into the warning collector argument. */
+    /**
+     * Merges the warnings from the collection argument into the warning collector argument.
+     */
     public static void mergeWarnings(Collection<Warning> warnings, IWarningCollector warningsCollector) {
         for (Warning warning : warnings) {
             if (warningsCollector.shouldWarn()) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
new file mode 100644
index 0000000..c52f27a
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.utils;
+
+import org.apache.asterix.common.api.IIdentifierMapper;
+
+public class IdentifierMappingUtil {
+
+    private static final IIdentifierMapper DEFAULT_MAPPER = identifier -> identifier;
+
+    private static IIdentifierMapper mapper = DEFAULT_MAPPER;
+
+    private IdentifierMappingUtil() {
+    }
+
+    public static void setMapper(IIdentifierMapper mapper) {
+        IdentifierMappingUtil.mapper = mapper;
+    }
+
+    public static String map(String key) {
+        return mapper.map(key);
+    }
+
+}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
new file mode 100644
index 0000000..ebdd740
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.utils;
+
+public class IdentifierUtil {
+
+    public static final String DATASET = "dataset";
+    public static final String DATAVERSE = "dataverse";
+
+    public static String dataset() {
+        return IdentifierMappingUtil.map(DATASET);
+    }
+
+    public static String dataverse() {
+        return IdentifierMappingUtil.map(DATAVERSE);
+    }
+}
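
The two utility classes above are what the rewritten error-message call sites elsewhere in this patch go through. A small hand-written illustration of that pattern (the class name and sample values are hypothetical; the message format mirrors the MetadataManagerUtil change later in this patch); with the default identity mapper, dataset() and dataverse() simply return "dataset" and "dataverse":

    // Illustrative only: the call-site pattern used by this patch's message changes.
    import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
    import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;

    public class UnknownDatasetMessageExample {

        static String unknownDatasetMessage(String datasetName, String dataverseName) {
            return "Unknown " + dataset() + " " + datasetName + " in " + dataverse() + " " + dataverseName;
        }

        public static void main(String[] args) {
            // With the default identity mapper this prints:
            //   Unknown dataset orders in dataverse commerce
            System.out.println(unknownDatasetMessage("orders", "commerce"));
        }
    }
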
diff --git a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
index b9ebeb6..8954766 100644
--- a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
+++ b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
@@ -163,8 +163,8 @@
 1073 = Cannot resolve alias reference for undefined identifier %1$s
 1074 = Cannot resolve ambiguous alias reference for identifier %1$s
 1075 = Inside limit clauses, it is disallowed to reference a variable having the same name as any variable bound in the same scope as the limit clause.
-1076 = Cannot find dataset %1$s because there is no dataverse declared, nor an alias with name %1$s!
-1077 = Cannot find dataset %1$s in dataverse %2$s nor an alias with name %1$s!
+1076 = Cannot find dataset %1$s because there is no dataverse declared, nor an alias with name %1$s
+1077 = Cannot find dataset %1$s in dataverse %2$s nor an alias with name %1$s
 1078 = Unexpected operator %1$s in an OperatorExpr starting with %2$s
 1079 = Compilation error: %1$s
 1080 = Cannot find node group with name %1$s
diff --git a/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_error.md b/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_error.md
index ea4c6a15..502647e 100644
--- a/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_error.md
+++ b/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_error.md
@@ -71,7 +71,7 @@ cannot be successfully resolved as a valid field access.
 If we have a typo as above in "customers" that misses the dataset name's ending "s",
 we will get an identifier resolution error as follows:
 
-    ERROR: Code: 1 "ASX1077: Cannot find dataset customer in dataverse Commerce nor an alias with name customer! (in line 2, at column 7)"
+    ERROR: Code: 1 "ASX1077: Cannot find dataset customer in dataverse Commerce nor an alias with name customer (in line 2, at column 7)"
 
 ##### Example
 (Q4.4)
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 85ab549..3ec8aec 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -19,6 +19,8 @@
 
 package org.apache.asterix.metadata;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -687,7 +689,7 @@ public class MetadataNode implements IMetadataNode {
             }
             throw new AsterixException(
                     org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "node group",
-                    nodeGroupName, "datasets",
+                    nodeGroupName, dataset() + "(s)",
                     datasets.stream().map(DatasetUtil::getFullyQualifiedDisplayName).collect(Collectors.joining(", ")));
         }
         try {
@@ -936,7 +938,7 @@ public class MetadataNode implements IMetadataNode {
                 throw new AsterixException(
                         org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS, "type",
                         TypeUtil.getFullyQualifiedDisplayName(set.getItemTypeDataverseName(), set.getItemTypeName()),
-                        "dataset", DatasetUtil.getFullyQualifiedDisplayName(set));
+                        dataset(), DatasetUtil.getFullyQualifiedDisplayName(set));
             }
             if (set.getMetaItemTypeDataverseName() != null
                     && set.getMetaItemTypeDataverseName().equals(dataverseName)) {
@@ -944,7 +946,7 @@ public class MetadataNode implements IMetadataNode {
                         org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS, "type",
                         TypeUtil.getFullyQualifiedDisplayName(set.getMetaItemTypeDataverseName(),
                                 set.getMetaItemTypeName()),
-                        "dataset", DatasetUtil.getFullyQualifiedDisplayName(set));
+                        dataset(), DatasetUtil.getFullyQualifiedDisplayName(set));
             }
         }
 
@@ -964,9 +966,8 @@ public class MetadataNode implements IMetadataNode {
                         Function.FunctionDependencyKind functionDependencyKind = functionDependencyKinds[i];
                         throw new AsterixException(
                                 org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS,
-                                functionDependencyKind.toString().toLowerCase(),
-                                functionDependencyKind.getDependencyDisplayName(dependency), "function",
-                                function.getSignature());
+                                functionDependencyKind, functionDependencyKind.getDependencyDisplayName(dependency),
+                                "function", function.getSignature());
                     }
                 }
             }
@@ -1098,7 +1099,7 @@ public class MetadataNode implements IMetadataNode {
             if (set.getItemTypeName().equals(datatypeName) && set.getItemTypeDataverseName().equals(dataverseName)) {
                 throw new AsterixException(
                         org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "type",
-                        TypeUtil.getFullyQualifiedDisplayName(dataverseName, datatypeName), "dataset",
+                        TypeUtil.getFullyQualifiedDisplayName(dataverseName, datatypeName), dataset(),
                         DatasetUtil.getFullyQualifiedDisplayName(set));
             }
         }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
index 6ba8ba8..3c02a4f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
@@ -18,6 +18,9 @@
  */
 package org.apache.asterix.metadata.declared;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -74,7 +77,7 @@ public class MetadataManagerUtil {
             return null;
         }
         if (dataverseName == null) {
-            throw new AlgebricksException("Cannot declare output-record-type with no dataverse!");
+            throw new AlgebricksException("Cannot declare output-record-type with no " + dataverse());
         }
         IAType type = findType(mdTxnCtx, dataverseName, outputRecordType);
         if (!(type instanceof ARecordType)) {
@@ -105,7 +108,8 @@ public class MetadataManagerUtil {
             String datasetName) throws AlgebricksException {
         Dataset dataset = findDataset(mdTxnCtx, dataverseName, datasetName);
         if (dataset == null) {
-            throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
+            throw new AlgebricksException(
+                    "Unknown " + dataset() + " " + datasetName + " in " + dataverse() + " " + dataverseName);
         }
         return dataset;
     }
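
    For reference, a minimal stand-alone sketch of what a pluggable identifier utility
    like the dataset()/dataverse() helpers imported above could look like. The real
    IdentifierUtil and IIdentifierMapper sources are not part of this hunk, so the names
    and wiring below are assumptions made for illustration only.

        import java.util.function.UnaryOperator;

        public final class IdentifierMapperSketch {

            // The mapper takes the stock identifier (e.g. "dataset") and returns the
            // display form an extension prefers (e.g. "collection").
            private static volatile UnaryOperator<String> mapper = UnaryOperator.identity();

            private IdentifierMapperSketch() {
            }

            public static void setMapper(UnaryOperator<String> newMapper) {
                mapper = newMapper;
            }

            public static String dataset() {
                return mapper.apply("dataset");
            }

            public static String dataverse() {
                return mapper.apply("dataverse");
            }

            public static void main(String[] args) {
                // Default mapping: error text keeps the stock identifiers.
                System.out.println("Unknown " + dataset() + " ds1 in " + dataverse() + " dv1");
                // An extension can remap the identifiers it exposes to users.
                setMapper(id -> "dataset".equals(id) ? "collection" : id);
                System.out.println("Unknown " + dataset() + " ds1 in " + dataverse() + " dv1");
            }
        }

    Because every message above is built from dataset()/dataverse() instead of string
    literals, swapping the mapper changes the wording everywhere at once.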
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index 68def8d..ec63825 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.metadata.declared;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;
 import static org.apache.asterix.metadata.utils.MetadataConstants.METADATA_OBJECT_NAME_INVALID_CHARS;
 
 import java.io.File;
@@ -39,6 +41,7 @@ import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -586,8 +589,8 @@ public class MetadataProvider implements IMetadataProvider<DataSourceId, String>
         Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
                 dataset.getDatasetName(), indexName);
         if (secondaryIndex == null) {
-            throw new AlgebricksException(
-                    "Code generation error: no index " + indexName + " for dataset " + dataset.getDatasetName());
+            throw new AlgebricksException("Code generation error: no index " + indexName + " for " + dataset() + " "
+                    + dataset.getDatasetName());
         }
         RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
@@ -949,8 +952,8 @@ public class MetadataProvider implements IMetadataProvider<DataSourceId, String>
         String datasetName = dataSource.getId().getDatasourceName();
         Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
         if (dataset == null) {
-            throw new AlgebricksException(
-                    "Unknown dataset " + datasetName + " in dataverse " + dataSource.getId().getDataverseName());
+            throw new AlgebricksException("Unknown " + dataset() + " " + datasetName + " in " + dataverse() + " "
+                    + dataSource.getId().getDataverseName());
         }
         int numKeys = primaryKeys.size();
         int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
@@ -999,7 +1002,7 @@ public class MetadataProvider implements IMetadataProvider<DataSourceId, String>
             JobSpecification jobSpec, IAType itemType, ITypedAdapterFactory adapterFactory,
             ITupleFilterFactory tupleFilterFactory, long outputLimit) throws AlgebricksException {
         if (itemType.getTypeTag() != ATypeTag.OBJECT) {
-            throw new AlgebricksException("Can only scan datasets of records.");
+            throw new AlgebricksException("Can only scan " + dataset() + "s of records.");
         }
 
         ISerializerDeserializer<?> payloadSerde =
@@ -1054,7 +1057,7 @@ public class MetadataProvider implements IMetadataProvider<DataSourceId, String>
                         keyType = IndexingConstants.getFieldType(j);
                         break;
                     default:
-                        throw new AlgebricksException("Unknown Dataset Type");
+                        throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_DATASET_TYPE, dsType.toString());
                 }
             } catch (AsterixException e) {
                 throw new AlgebricksException(e);
@@ -1416,7 +1419,8 @@ public class MetadataProvider implements IMetadataProvider<DataSourceId, String>
 
         // Sanity checks.
         if (primaryKeys.size() > 1) {
-            throw new AlgebricksException("Cannot create inverted index on dataset with composite primary key.");
+            throw new AlgebricksException(
+                    "Cannot create inverted index on " + dataset() + "s with composite primary key.");
         }
         // The size of secondaryKeys can be two if it receives input from its
         // TokenizeOperator- [token, number of token]
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
index 3cd4346..a9fa024 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.metadata.entities;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -197,5 +199,10 @@ public class Function implements IMetadataEntity<Function> {
         public String getDependencyDisplayName(Triple<DataverseName, String, String> dependency) {
             return dependencyDisplayNameAccessor.apply(dependency);
         }
+
+        @Override
+        public String toString() {
+            return this == DATASET ? dataset() : name().toLowerCase();
+        }
     }
 }
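
    The toString() override above lets call sites pass the enum value straight into a
    formatted message and still pick up the mapped identifier. A stand-alone sketch of
    the same pattern, with invented names (DisplayKind, collectionWord) standing in for
    FunctionDependencyKind and IdentifierUtil.dataset():

        public class EnumToStringSketch {

            // Stand-in for IdentifierUtil.dataset(); the real mapping is pluggable.
            static String collectionWord() {
                return "dataset";
            }

            enum DisplayKind {
                DATASET,
                FUNCTION,
                SYNONYM;

                @Override
                public String toString() {
                    // DATASET routes through the mapper; other kinds keep the lower-cased name.
                    return this == DATASET ? collectionWord() : name().toLowerCase();
                }
            }

            public static void main(String[] args) {
                // The enum value itself is now a valid message parameter.
                System.out.println(String.format("cannot drop %s because %s X depends on it",
                        DisplayKind.DATASET, DisplayKind.FUNCTION));
            }
        }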
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index d5e941f..92390a7 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.metadata.feeds;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.rmi.RemoteException;
 import java.util.Map;
 
@@ -53,6 +55,7 @@ import org.apache.asterix.metadata.entities.Library;
 import org.apache.asterix.metadata.utils.MetadataConstants;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Triple;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -73,12 +76,12 @@ public class FeedMetadataUtil {
             String datasetName) throws AlgebricksException {
         Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
         if (dataset == null) {
-            throw new CompilationException("Unknown target dataset :" + datasetName);
+            throw new CompilationException("Unknown target " + dataset() + " :" + datasetName);
         }
 
         if (!dataset.getDatasetType().equals(DatasetType.INTERNAL)) {
-            throw new CompilationException("Statement not applicable. Dataset " + datasetName
-                    + " is not of required type " + DatasetType.INTERNAL);
+            throw new CompilationException("Statement not applicable. " + StringUtils.capitalize(dataset()) + " "
+                    + datasetName + " is not of required type " + DatasetType.INTERNAL);
         }
         return dataset;
     }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 4bcc5f0..4ef349b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -18,6 +18,9 @@
  */
 package org.apache.asterix.metadata.utils;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+import static org.apache.asterix.common.utils.IdentifierUtil.dataverse;
+
 import java.io.DataOutput;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -366,7 +369,8 @@ public class DatasetUtil {
         DataverseName dataverseName = dataverse.getDataverseName();
         Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
         if (dataset == null) {
-            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
+            throw new AsterixException(
+                    "Could not find " + dataset() + " " + datasetName + " in " + dataverse() + " " + dataverseName);
         }
         JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
index f2b7558..7a57af2 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.metadata.utils;
 
+import static org.apache.asterix.common.utils.IdentifierUtil.dataset;
+
 import java.util.List;
 import java.util.Map;
 
@@ -75,7 +77,8 @@ public class InvertedIndexResourceFactoryProvider implements IResourceFactoryPro
                     index.getIndexType().name(), dataset.getDatasetType());
         }
         if (numPrimaryKeys > 1) {
-            throw new AsterixException("Cannot create inverted index on dataset with composite primary key.");
+            throw new AsterixException(
+                    "Cannot create inverted index on " + dataset() + "s with composite primary key.");
         }
         if (numSecondaryKeys > 1) {
             throw new AsterixException("Cannot create composite inverted index on multiple fields.");
diff --git a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
index 4b8179c..0b7af91 100644
--- a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
+++ b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
@@ -113,6 +113,7 @@ public class AlgebricksException extends Exception implements IFormattedExceptio
         return errorCode;
     }
 
+    @Override
     public Serializable[] getParams() {
         return params;
     }
@@ -121,6 +122,7 @@ public class AlgebricksException extends Exception implements IFormattedExceptio
         return nodeId;
     }
 
+    @Override
     public SourceLocation getSourceLocation() {
         return sourceLoc;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksException.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksException.java
index 0769e18..977e5d2 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksException.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksException.java
@@ -130,6 +130,7 @@ public class HyracksException extends IOException implements IFormattedException
         return errorCode;
     }
 
+    @Override
     public Serializable[] getParams() {
         return params;
     }
@@ -138,6 +139,7 @@ public class HyracksException extends IOException implements IFormattedException
         return nodeId;
     }
 
+    @Override
     public SourceLocation getSourceLocation() {
         return sourceLoc;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/IFormattedException.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/IFormattedException.java
index 33b3995..b9d2deb 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/IFormattedException.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/IFormattedException.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hyracks.api.exceptions;
 
+import java.io.Serializable;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Stream;
@@ -58,6 +59,16 @@ public interface IFormattedException {
     Optional<IError> getError();
 
     /**
+     * @return the source location
+     */
+    SourceLocation getSourceLocation();
+
+    /**
+     * @return the parameters to use when formatting
+     */
+    Serializable[] getParams();
+
+    /**
      * Indicates whether this exception matches the supplied error code
      */
     default boolean matches(IError candidate) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/Warning.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/Warning.java
index ec8f8e9..5f01559 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/Warning.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/Warning.java
@@ -24,26 +24,29 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.Objects;
 
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.hyracks.api.util.ErrorMessageUtil;
 
 public class Warning implements Serializable {
 
-    private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 2L;
     private final String component;
     private final SourceLocation srcLocation;
     private final int code;
     private final String message;
+    private final Serializable[] params;
 
-    private Warning(String component, SourceLocation srcLocation, int code, String message) {
+    private Warning(String component, SourceLocation srcLocation, int code, String message, Serializable... params) {
         this.component = component;
         this.srcLocation = srcLocation;
         this.code = code;
         this.message = message;
+        this.params = params;
     }
 
     public static Warning of(SourceLocation srcLocation, IError code, Serializable... params) {
         return new Warning(code.component(), srcLocation, code.intValue(), ErrorMessageUtil
-                .formatMessage(code.component(), code.intValue(), code.errorMessage(), srcLocation, params));
+                .formatMessage(code.component(), code.intValue(), code.errorMessage(), srcLocation, params), params);
     }
 
     public String getComponent() {
@@ -84,13 +87,25 @@ public class Warning implements Serializable {
         output.writeInt(code);
         output.writeUTF(message);
         SourceLocation.writeFields(srcLocation, output);
+        writeParams(output, params);
+    }
+
+    private static void writeParams(DataOutput output, Serializable[] params) throws IOException {
+        byte[] serialize = SerializationUtils.serialize(params);
+        output.writeInt(serialize.length);
+        output.write(serialize);
     }
 
     public static Warning create(DataInput input) throws IOException {
         String comp = input.readUTF();
         int code = input.readInt();
         String msg = input.readUTF();
-        return new Warning(comp, SourceLocation.create(input), code, msg);
+        SourceLocation sourceLocation = SourceLocation.create(input);
+        int paramsLen = input.readInt();
+        byte[] paramsBytes = new byte[paramsLen];
+        input.readFully(paramsBytes, 0, paramsBytes.length);
+        Serializable[] params = SerializationUtils.deserialize(paramsBytes);
+        return new Warning(comp, sourceLocation, code, msg, params);
     }
 
     @Override
@@ -98,4 +113,8 @@ public class Warning implements Serializable {
         return "Warning{" + "component='" + component + '\'' + ", srcLocation=" + srcLocation + ", code=" + code
                 + ", message='" + message + '\'' + '}';
     }
+
+    public Serializable[] getParams() {
+        return params;
+    }
 }
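
    For clarity, a stand-alone sketch of the length-prefixed params round trip that the
    Warning changes above rely on. The class and method names are invented for the
    example; only the commons-lang3 SerializationUtils calls and the length-prefix
    layout mirror the diff.

        import java.io.ByteArrayInputStream;
        import java.io.ByteArrayOutputStream;
        import java.io.DataInput;
        import java.io.DataInputStream;
        import java.io.DataOutput;
        import java.io.DataOutputStream;
        import java.io.IOException;
        import java.io.Serializable;
        import java.util.Arrays;

        import org.apache.commons.lang3.SerializationUtils;

        public final class ParamsRoundTrip {

            // Serialize the whole array with commons-lang3 and length-prefix it, as in
            // Warning.writeParams above.
            static void writeParams(DataOutput out, Serializable[] params) throws IOException {
                byte[] bytes = SerializationUtils.serialize(params);
                out.writeInt(bytes.length);
                out.write(bytes);
            }

            // Read the length prefix, then the payload, then deserialize, as in Warning.create.
            static Serializable[] readParams(DataInput in) throws IOException {
                byte[] bytes = new byte[in.readInt()];
                in.readFully(bytes);
                return SerializationUtils.deserialize(bytes);
            }

            public static void main(String[] args) throws IOException {
                Serializable[] params = { "orders", 42 };
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                writeParams(new DataOutputStream(bos), params);
                Serializable[] copy =
                        readParams(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
                System.out.println(Arrays.toString(copy));   // [orders, 42]
            }
        }

    Keeping the raw params alongside the pre-formatted message is what allows consumers
    to re-render a warning with a different identifier mapping later.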