Posted to commits@asterixdb.apache.org by mb...@apache.org on 2017/11/11 18:12:20 UTC

[5/5] asterixdb git commit: [NO ISSUE][FAIL] Replace MetadataException by AlgebricksException in APIs

[NO ISSUE][FAIL] Replace MetadataException by AlgebricksException in APIs

Change-Id: Ia543ac84b698042582249837912d21ddaa48453f
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2134
Sonar-Qube: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: Michael Blow <mb...@apache.org>
Integration-Tests: Jenkins <je...@fulliautomatix.ics.uci.edu>
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>


Project: http://git-wip-us.apache.org/repos/asf/asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/asterixdb/commit/d25513cc
Tree: http://git-wip-us.apache.org/repos/asf/asterixdb/tree/d25513cc
Diff: http://git-wip-us.apache.org/repos/asf/asterixdb/diff/d25513cc

Branch: refs/heads/master
Commit: d25513ccbf3ae171fbacd5ebfe9367a2b8c5ff96
Parents: 7722e5d
Author: Murtadha Hubail <mh...@apache.org>
Authored: Sat Nov 11 03:08:21 2017 +0300
Committer: Michael Blow <mb...@apache.org>
Committed: Sat Nov 11 10:11:22 2017 -0800

----------------------------------------------------------------------
 .../physical/InvertedIndexPOperator.java        |  82 ++--
 .../LangExpressionToPlanTranslator.java         |  71 ++-
 .../asterix/app/translator/QueryTranslator.java |  67 ++-
 .../asterix/lang/common/util/FunctionUtil.java  |  34 +-
 .../asterix/metadata/MetadataManager.java       | 137 +++---
 .../apache/asterix/metadata/MetadataNode.java   | 489 ++++++++++---------
 .../api/IMetadataEntityTupleTranslator.java     |  24 +-
 .../asterix/metadata/api/IMetadataManager.java  | 269 +++++-----
 .../asterix/metadata/api/IMetadataNode.java     | 336 +++++++------
 .../asterix/metadata/api/IValueExtractor.java   |   7 +-
 .../metadata/bootstrap/MetadataBootstrap.java   |  59 ++-
 .../metadata/declared/MetadataManagerUtil.java  |  47 +-
 .../metadata/declared/MetadataProvider.java     | 423 ++++++++--------
 .../metadata/entities/BuiltinTypeMap.java       |   3 +-
 .../CompactionPolicyTupleTranslator.java        |   4 +-
 .../DatasetTupleTranslator.java                 |  11 +-
 .../DatasourceAdapterTupleTranslator.java       |   6 +-
 .../DatatypeTupleTranslator.java                |  15 +-
 .../DataverseTupleTranslator.java               |   4 +-
 .../ExternalFileTupleTranslator.java            |   6 +-
 .../FeedConnectionTupleTranslator.java          |   6 +-
 .../FeedPolicyTupleTranslator.java              |   4 +-
 .../FeedTupleTranslator.java                    |   4 +-
 .../FunctionTupleTranslator.java                |   4 +-
 .../IndexTupleTranslator.java                   |  13 +-
 .../LibraryTupleTranslator.java                 |   5 +-
 .../NodeGroupTupleTranslator.java               |   4 +-
 .../NodeTupleTranslator.java                    |   3 +-
 .../metadata/feeds/BuiltinFeedPolicies.java     |   5 +-
 .../metadata/feeds/FeedMetadataUtil.java        |  10 +-
 .../functions/ExternalFunctionCompilerUtil.java |  14 +-
 .../asterix/metadata/utils/DatasetUtil.java     | 129 +++--
 .../utils/SplitsAndConstraintsUtil.java         |  15 +-
 .../MetadataEntityValueExtractor.java           |   3 +-
 .../NestedDatatypeNameValueExtractor.java       |   4 +-
 .../TupleCopyValueExtractor.java                |   5 +-
 .../DatasetTupleTranslatorTest.java             |   3 +-
 .../IndexTupleTranslatorTest.java               |   3 +-
 .../asterix/tools/datagen/AdmDataGen.java       |  47 +-
 .../tools/translator/ADGenDmlTranslator.java    |   6 +-
 40 files changed, 1196 insertions(+), 1185 deletions(-)
----------------------------------------------------------------------
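
The change itself is mechanical: across the 40 files above, API signatures that
declared MetadataException now declare AlgebricksException instead. The hunks
below show why this compiles without touching most method bodies: bodies keep
throwing MetadataException under the widened clauses, which works because
MetadataException is a subtype of AlgebricksException. A minimal sketch of the
shape, using simplified stand-in classes rather than the real
org.apache.hyracks.algebricks.common.exceptions.AlgebricksException and
org.apache.asterix.metadata.MetadataException:

    // Stand-in types; the constructors and visibility here are assumptions,
    // not the real Asterix/Hyracks class bodies.
    class AlgebricksException extends Exception {
        AlgebricksException(String message) { super(message); }
        AlgebricksException(Throwable cause) { super(cause); }
    }

    class MetadataException extends AlgebricksException {
        MetadataException(Throwable cause) { super(cause); }
    }

    interface IMetadataManagerSketch {
        // before this commit: ... throws MetadataException;
        void addDataverse(String dataverseName) throws AlgebricksException;
    }

Callers whose own contracts already use AlgebricksException simply shed their
try/catch wrappers; callers with narrower contracts, such as the translator's
CompilationException path, now catch and wrap instead, as later hunks show.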


http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index cd0a63c..12114f0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -100,19 +100,15 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
         jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
 
         MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
-        Dataset dataset;
-        try {
-            dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
-        } catch (MetadataException e) {
-            throw new AlgebricksException(e);
-        }
+        Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
         int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
 
         int[] minFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMinFilterVars(), inputSchemas);
         int[] maxFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMaxFilterVars(), inputSchemas);
         boolean retainNull = false;
         if (op.getOperatorTag() == LogicalOperatorTag.LEFT_OUTER_UNNEST_MAP) {
-            // By nature, LEFT_OUTER_UNNEST_MAP should generate null values for non-matching tuples.
+            // By nature, LEFT_OUTER_UNNEST_MAP should generate null values for non-matching
+            // tuples.
             retainNull = true;
         }
         // Build runtime.
@@ -135,46 +131,38 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
             AbstractUnnestMapOperator unnestMap, IOperatorSchema opSchema, boolean retainInput, boolean retainMissing,
             String datasetName, Dataset dataset, String indexName, ATypeTag searchKeyType, int[] keyFields,
             SearchModifierType searchModifierType, IAlgebricksConstantValue similarityThreshold,
-            int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes,
-            boolean isFullTextSearchQuery) throws AlgebricksException {
-        try {
-
-            boolean propagateIndexFilter = unnestMap.propagateIndexFilter();
-            IAObject simThresh = ((AsterixConstantValue) similarityThreshold).getObject();
-            int numPrimaryKeys = dataset.getPrimaryKeys().size();
-            Index secondaryIndex = MetadataManager.INSTANCE
-                    .getIndex(metadataProvider.getMetadataTxnContext(), dataset.getDataverseName(),
-                            dataset.getDatasetName(), indexName);
-            if (secondaryIndex == null) {
-                throw new AlgebricksException(
-                        "Code generation error: no index " + indexName + " for dataset " + datasetName);
-            }
-            IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
-            RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
-            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint =
-                    metadataProvider.getSplitProviderAndConstraints(dataset, indexName);
-            // TODO: Here we assume there is only one search key field.
-            int queryField = keyFields[0];
-            // Get tokenizer and search modifier factories.
-            IInvertedIndexSearchModifierFactory searchModifierFactory =
-                    InvertedIndexAccessMethod.getSearchModifierFactory(searchModifierType, simThresh, secondaryIndex);
-            IBinaryTokenizerFactory queryTokenizerFactory = InvertedIndexAccessMethod
-                    .getBinaryTokenizerFactory(searchModifierType, searchKeyType, secondaryIndex);
-            IIndexDataflowHelperFactory dataflowHelperFactory =
-                    new IndexDataflowHelperFactory(metadataProvider.getStorageComponentProvider().getStorageManager(),
-                            secondarySplitsAndConstraint.first);
-            LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp =
-                    new LSMInvertedIndexSearchOperatorDescriptor(jobSpec, outputRecDesc, queryField,
-                            dataflowHelperFactory, queryTokenizerFactory, searchModifierFactory, retainInput,
-                            retainMissing, context.getMissingWriterFactory(),
-                            dataset.getSearchCallbackFactory(metadataProvider.getStorageComponentProvider(),
-                                    secondaryIndex,
-                                    ((JobEventListenerFactory) jobSpec.getJobletEventListenerFactory()).getJobId(),
-                                    IndexOperation.SEARCH, null), minFilterFieldIndexes, maxFilterFieldIndexes,
-                            isFullTextSearchQuery, numPrimaryKeys, propagateIndexFilter);
-            return new Pair<>(invIndexSearchOp, secondarySplitsAndConstraint.second);
-        } catch (MetadataException e) {
-            throw new AlgebricksException(e);
+            int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes, boolean isFullTextSearchQuery)
+            throws AlgebricksException {
+        boolean propagateIndexFilter = unnestMap.propagateIndexFilter();
+        IAObject simThresh = ((AsterixConstantValue) similarityThreshold).getObject();
+        int numPrimaryKeys = dataset.getPrimaryKeys().size();
+        Index secondaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
+                dataset.getDataverseName(), dataset.getDatasetName(), indexName);
+        if (secondaryIndex == null) {
+            throw new AlgebricksException(
+                    "Code generation error: no index " + indexName + " for dataset " + datasetName);
         }
+        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
+        RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint =
+                metadataProvider.getSplitProviderAndConstraints(dataset, indexName);
+        // TODO: Here we assume there is only one search key field.
+        int queryField = keyFields[0];
+        // Get tokenizer and search modifier factories.
+        IInvertedIndexSearchModifierFactory searchModifierFactory =
+                InvertedIndexAccessMethod.getSearchModifierFactory(searchModifierType, simThresh, secondaryIndex);
+        IBinaryTokenizerFactory queryTokenizerFactory =
+                InvertedIndexAccessMethod.getBinaryTokenizerFactory(searchModifierType, searchKeyType, secondaryIndex);
+        IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(
+                metadataProvider.getStorageComponentProvider().getStorageManager(), secondarySplitsAndConstraint.first);
+        LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp = new LSMInvertedIndexSearchOperatorDescriptor(
+                jobSpec, outputRecDesc, queryField, dataflowHelperFactory, queryTokenizerFactory, searchModifierFactory,
+                retainInput, retainMissing, context.getMissingWriterFactory(),
+                dataset.getSearchCallbackFactory(metadataProvider.getStorageComponentProvider(), secondaryIndex,
+                        ((JobEventListenerFactory) jobSpec.getJobletEventListenerFactory()).getJobId(),
+                        IndexOperation.SEARCH, null),
+                minFilterFieldIndexes, maxFilterFieldIndexes, isFullTextSearchQuery, numPrimaryKeys,
+                propagateIndexFilter);
+        return new Pair<>(invIndexSearchOp, secondarySplitsAndConstraint.second);
     }
 }
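
The hunk above is the typical caller-side payoff: findDataset used to declare
MetadataException, so the operator had to wrap it to satisfy its own
AlgebricksException contract, and createInvertedIndexRuntime wrapped its whole
body in a try block for the same reason. A compilable sketch of the before and
after, reusing the stand-in exception classes from the earlier sketch
(findDatasetOld and findDatasetNew are invented names modeling the two
signatures):

    // Invented stub modeling the provider before and after this commit.
    class MetadataProviderStub {
        String findDatasetOld(String dv, String ds) throws MetadataException {
            return dv + "." + ds; // stand-in for the real lookup
        }

        String findDatasetNew(String dv, String ds) throws AlgebricksException {
            return dv + "." + ds;
        }
    }

    class InvertedIndexCallSite {
        String before(MetadataProviderStub mp) throws AlgebricksException {
            String dataset;
            try {
                dataset = mp.findDatasetOld("dv", "ds");
            } catch (MetadataException e) {
                throw new AlgebricksException(e); // boilerplate this commit deletes
            }
            return dataset;
        }

        String after(MetadataProviderStub mp) throws AlgebricksException {
            return mp.findDatasetNew("dv", "ds"); // declared types already agree
        }
    }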

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 6f3ec76..4579e6f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -245,7 +245,8 @@ class LangExpressionToPlanTranslator
         AssignOperator assign = new AssignOperator(pkVars, pkExprs);
         assign.getInputs().add(new MutableObject<>(dssOp));
 
-        // If the input is pre-sorted, we set the ordering property explicitly in the assign
+        // If the input is pre-sorted, we set the ordering property explicitly in the
+        // assign
         if (clffs.alreadySorted()) {
             List<OrderColumn> orderColumns = new ArrayList<>();
             for (int i = 0; i < pkVars.size(); ++i) {
@@ -328,13 +329,13 @@ class LangExpressionToPlanTranslator
             }
         } else {
             /**
-             * add the collection-to-sequence right before the project,
-             * because dataset only accept non-collection records
+             * add the collection-to-sequence right before the project, because dataset only
+             * accept non-collection records
              */
             LogicalVariable seqVar = context.newVar();
             /**
-             * This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest
-             * it; otherwise do nothing.
+             * This assign adds a marker function collection-to-sequence: if the input is a
+             * singleton collection, unnest it; otherwise do nothing.
              */
             AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
                     new MutableObject<>(new ScalarFunctionCallExpression(
@@ -557,7 +558,8 @@ class LangExpressionToPlanTranslator
         return processReturningExpression(rootOperator, insertOp, compiledInsert);
     }
 
-    // Stitches the translated operators for the returning expression into the query plan.
+    // Stitches the translated operators for the returning expression into the query
+    // plan.
     private ILogicalOperator processReturningExpression(ILogicalOperator inputOperator,
             InsertDeleteUpsertOperator insertOp, CompiledInsertStatement compiledInsert) throws AlgebricksException {
         Expression returnExpression = compiledInsert.getReturnExpression();
@@ -566,7 +568,7 @@ class LangExpressionToPlanTranslator
         }
         ILogicalOperator rootOperator = inputOperator;
 
-        //Makes the id of the insert var point to the record variable.
+        // Makes the id of the insert var point to the record variable.
         context.newVarFromExpression(compiledInsert.getVar());
         context.setVar(compiledInsert.getVar(),
                 ((VariableReferenceExpression) insertOp.getPayloadExpression().getValue()).getVariableReference());
@@ -606,7 +608,7 @@ class LangExpressionToPlanTranslator
                 dataset.getDatasetDetails(), domain);
     }
 
-    private FileSplit getDefaultOutputFileLocation(ICcApplicationContext appCtx) throws MetadataException {
+    private FileSplit getDefaultOutputFileLocation(ICcApplicationContext appCtx) throws AlgebricksException {
         String outputDir = System.getProperty("java.io.tmpDir");
         String filePath =
                 outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX + outputFileID.incrementAndGet();
@@ -702,8 +704,12 @@ class LangExpressionToPlanTranslator
         }
 
         AbstractFunctionCallExpression f;
-        if ((f = lookupUserDefinedFunction(signature, args)) == null) {
-            f = lookupBuiltinFunction(signature.getName(), signature.getArity(), args);
+        try {
+            if ((f = lookupUserDefinedFunction(signature, args)) == null) {
+                f = lookupBuiltinFunction(signature.getName(), signature.getArity(), args);
+            }
+        } catch (AlgebricksException e) {
+            throw new CompilationException(e);
         }
 
         if (f == null) {
@@ -726,7 +732,7 @@ class LangExpressionToPlanTranslator
     }
 
     private AbstractFunctionCallExpression lookupUserDefinedFunction(FunctionSignature signature,
-            List<Mutable<ILogicalExpression>> args) throws MetadataException {
+            List<Mutable<ILogicalExpression>> args) throws AlgebricksException {
         if (signature.getNamespace() == null) {
             return null;
         }
@@ -1406,8 +1412,8 @@ class LangExpressionToPlanTranslator
     }
 
     /**
-     * Eliminate shared operator references in a query plan.
-     * Deep copy a new query plan subtree whenever there is a shared operator reference.
+     * Eliminate shared operator references in a query plan. Deep copy a new query
+     * plan subtree whenever there is a shared operator reference.
      *
      * @param plan,
      *            the query plan.
@@ -1421,15 +1427,16 @@ class LangExpressionToPlanTranslator
     }
 
     /**
-     * Eliminate shared operator references in a query plan rooted at <code>currentOpRef.getValue()</code>.
-     * Deep copy a new query plan subtree whenever there is a shared operator reference.
+     * Eliminate shared operator references in a query plan rooted at
+     * <code>currentOpRef.getValue()</code>. Deep copy a new query plan subtree
+     * whenever there is a shared operator reference.
      *
      * @param currentOpRef,
      *            the operator reference to consider
      * @param opRefSet,
      *            the set storing seen operator references so far.
-     * @return a mapping that maps old variables to new variables, for the ancestors of
-     *         <code>currentOpRef</code> to replace variables properly.
+     * @return a mapping that maps old variables to new variables, for the ancestors
+     *         of <code>currentOpRef</code> to replace variables properly.
      * @throws CompilationException
      */
     private LinkedHashMap<LogicalVariable, LogicalVariable> eliminateSharedOperatorReference(
@@ -1441,9 +1448,12 @@ class LangExpressionToPlanTranslator
 
             // Recursively eliminates shared references in nested plans.
             if (currentOperator.hasNestedPlans()) {
-                // Since a nested plan tree itself can never be shared with another nested plan tree in
-                // another operator, the operation called in the if block does not need to replace
-                // any variables further for <code>currentOpRef.getValue()</code> nor its ancestor.
+                // Since a nested plan tree itself can never be shared with another nested plan
+                // tree in
+                // another operator, the operation called in the if block does not need to
+                // replace
+                // any variables further for <code>currentOpRef.getValue()</code> nor its
+                // ancestor.
                 AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) currentOperator;
                 for (ILogicalPlan plan : opWithNestedPlan.getNestedPlans()) {
                     for (Mutable<ILogicalOperator> rootRef : plan.getRoots()) {
@@ -1465,7 +1475,8 @@ class LangExpressionToPlanTranslator
                     LinkedHashMap<LogicalVariable, LogicalVariable> cloneVarMap =
                             visitor.getInputToOutputVariableMapping();
 
-                    // Substitute variables according to the deep copy which generates new variables.
+                    // Substitute variables according to the deep copy which generates new
+                    // variables.
                     VariableUtilities.substituteVariables(currentOperator, cloneVarMap, null);
                     varMap.putAll(cloneVarMap);
 
@@ -1481,7 +1492,8 @@ class LangExpressionToPlanTranslator
                 // Substitute variables according to the new subtree.
                 VariableUtilities.substituteVariables(currentOperator, childVarMap, null);
 
-                // Updates mapping like <$a, $b> in varMap to <$a, $c>, where there is a mapping <$b, $c>
+                // Updates mapping like <$a, $b> in varMap to <$a, $c>, where there is a mapping
+                // <$b, $c>
                 // in childVarMap.
                 varMap.entrySet().forEach(entry -> {
                     LogicalVariable newVar = childVarMap.get(entry.getValue());
@@ -1512,7 +1524,8 @@ class LangExpressionToPlanTranslator
      *            the expression to select tuples that are processed by this branch.
      * @param branchExpression,
      *            the expression to be evaluated in this branch.
-     * @return a pair of the constructed subplan operator and the output variable for the branch.
+     * @return a pair of the constructed subplan operator and the output variable
+     *         for the branch.
      * @throws CompilationException
      */
     protected Pair<ILogicalOperator, LogicalVariable> constructSubplanOperatorForBranch(ILogicalOperator inputOp,
@@ -1523,7 +1536,8 @@ class LangExpressionToPlanTranslator
         Mutable<ILogicalOperator> nestedSource =
                 new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(subplanOp)));
         SelectOperator select = new SelectOperator(selectExpr, false, null);
-        // The select operator cannot be moved up and down, otherwise it will cause typing issues (ASTERIXDB-1203).
+        // The select operator cannot be moved up and down, otherwise it will cause
+        // typing issues (ASTERIXDB-1203).
         OperatorPropertiesUtil.markMovable(select, false);
         select.getInputs().add(nestedSource);
         Pair<ILogicalOperator, LogicalVariable> pBranch = branchExpression.accept(this, new MutableObject<>(select));
@@ -1552,12 +1566,14 @@ class LangExpressionToPlanTranslator
         return new AssignOperator(v1, new MutableObject<>(comparison));
     }
 
-    // Generates the filter condition for whether a conditional branch should be executed.
+    // Generates the filter condition for whether a conditional branch should be
+    // executed.
     protected Mutable<ILogicalExpression> generateNoMatchedPrecedingWhenBranchesFilter(
             List<ILogicalExpression> inputBooleanExprs) {
         List<Mutable<ILogicalExpression>> arguments = new ArrayList<>();
         for (ILogicalExpression inputBooleanExpr : inputBooleanExprs) {
-            // A NULL/MISSING valued WHEN expression does not lead to the corresponding THEN execution.
+            // A NULL/MISSING valued WHEN expression does not lead to the corresponding THEN
+            // execution.
             // Therefore, we should check a previous WHEN boolean condition is not unknown.
             arguments.add(generateAndNotIsUnknownWrap(inputBooleanExpr));
         }
@@ -1580,7 +1596,8 @@ class LangExpressionToPlanTranslator
                 new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.AND), arguments));
     }
 
-    // Generates the plan for "UNION ALL" or union expression from its input expressions.
+    // Generates the plan for "UNION ALL" or union expression from its input
+    // expressions.
     protected Pair<ILogicalOperator, LogicalVariable> translateUnionAllFromInputExprs(List<ILangExpression> inputExprs,
             Mutable<ILogicalOperator> tupSource) throws CompilationException {
         List<Mutable<ILogicalOperator>> inputOpRefsToUnion = new ArrayList<>();
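
Not every caller can simply widen. The function-resolution path above surfaces
CompilationException, so the broadened lookupUserDefinedFunction call is now
caught and wrapped at that boundary instead. A sketch of that shape, again with
stand-ins; it assumes, as the hunk suggests, that the real
org.apache.asterix.common.exceptions.CompilationException extends
AlgebricksException:

    // Stand-in for the compiler-facing exception type.
    class CompilationException extends AlgebricksException {
        CompilationException(Throwable cause) { super(cause); }
    }

    class TranslatorSketch {
        // Models a metadata lookup whose throws clause was widened.
        String lookupUdf(String name) throws AlgebricksException {
            return name;
        }

        String resolveFunction(String name) throws CompilationException {
            try {
                return lookupUdf(name);
            } catch (AlgebricksException e) {
                throw new CompilationException(e); // same shape as the hunk above
            }
        }
    }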

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 494eb65..a811454 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -265,9 +265,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         FileSplit outputFile = null;
         IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
         IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
-        /* Since the system runs a large number of threads, when HTTP requests don't return, it becomes difficult to
-         * find the thread running the request to determine where it has stopped.
-         * Setting the thread name helps make that easier
+        /*
+         * Since the system runs a large number of threads, when HTTP requests don't
+         * return, it becomes difficult to find the thread running the request to
+         * determine where it has stopped. Setting the thread name helps make that
+         * easier
          */
         String threadName = Thread.currentThread().getName();
         Thread.currentThread().setName(QueryTranslator.class.getSimpleName());
@@ -397,8 +399,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                         // No op
                         break;
                     case Statement.Kind.EXTENSION:
-                        ((IExtensionStatement) stmt)
-                                .handle(hcc, this, requestParameters, metadataProvider, resultSetIdCounter);
+                        ((IExtensionStatement) stmt).handle(hcc, this, requestParameters, metadataProvider,
+                                resultSetIdCounter);
                         break;
                     default:
                         throw new CompilationException("Unknown function");
@@ -641,7 +643,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 metadataProvider.setMetadataTxnContext(mdTxnCtx);
             }
 
-            // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
+            // #. add a new dataset with PendingNoOp after deleting the dataset with
+            // PendingAddOp
             MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
             dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
             MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
@@ -656,7 +659,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 // #. execute compensation operations
                 // remove the index in NC
                 // [Notice]
-                // As long as we updated(and committed) metadata, we should remove any effect of the job
+                // As long as we updated (and committed) metadata, we should remove any effect of
+                // the job
                 // because an exception occurs during runJob.
                 mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                 bActiveTxn = true;
@@ -790,9 +794,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             int keyIndex = 0;
             boolean overridesFieldTypes = false;
 
-            // this set is used to detect duplicates in the specified keys in the create index statement
+            // this set is used to detect duplicates in the specified keys in the create
+            // index statement
             // e.g. CREATE INDEX someIdx on dataset(id,id).
-            // checking only the names is not enough. Need also to check the source indicators for cases like:
+            // checking only the names is not enough. Need also to check the source
+            // indicators for cases like:
             // CREATE INDEX someIdx on dataset(meta().id, id)
             Set<Pair<List<String>, Integer>> indexKeysSet = new HashSet<>();
 
@@ -820,10 +826,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                         throw new AsterixException(ErrorCode.INDEX_ILLEGAL_ENFORCED_NON_OPTIONAL,
                                 String.valueOf(fieldExpr.first));
                     }
-                    // don't allow creating an enforced index on a closed-type field, fields that are part of schema.
+                    // don't allow creating an enforced index on a closed-type field, fields that
+                    // are part of schema.
                     // get the field type, if it's not null, then the field is closed-type
-                    if (stmtCreateIndex.isEnforced() &&
-                            subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size())) != null) {
+                    if (stmtCreateIndex.isEnforced()
+                            && subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size())) != null) {
                         throw new AsterixException(ErrorCode.INDEX_ILLEGAL_ENFORCED_ON_CLOSED_FIELD,
                                 String.valueOf(fieldExpr.first));
                     }
@@ -845,9 +852,10 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                             "Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
                 }
 
-                // try to add the key & its source to the set of keys, if key couldn't be added, there is a duplicate
-                if (!indexKeysSet.add(new Pair<>(fieldExpr.first,
-                        stmtCreateIndex.getFieldSourceIndicators().get(keyIndex)))) {
+                // try to add the key & its source to the set of keys, if key couldn't be added,
+                // there is a duplicate
+                if (!indexKeysSet
+                        .add(new Pair<>(fieldExpr.first, stmtCreateIndex.getFieldSourceIndicators().get(keyIndex)))) {
                     throw new AsterixException(ErrorCode.INDEX_ILLEGAL_REPETITIVE_FIELD,
                             String.valueOf(fieldExpr.first));
                 }
@@ -859,9 +867,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
 
             validateIndexKeyFields(stmtCreateIndex, keySourceIndicators, aRecordType, metaRecordType, indexFields,
                     indexFieldTypes);
-            // Checks whether a user is trying to create an inverted secondary index on a dataset
+            // Checks whether a user is trying to create an inverted secondary index on a
+            // dataset
             // with a variable-length primary key.
-            // Currently, we do not support this. Therefore, as a temporary solution, we print an
+            // Currently, we do not support this. Therefore, as a temporary solution, we
+            // print an
             // error message and stop.
             if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
                     || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
@@ -994,8 +1004,10 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             runJob(hcc, spec, jobFlags);
 
             // #. flush the internal dataset
-            // We need this to guarantee the correctness of component Id acceleration for secondary-to-primary index.
-            // Otherwise, the new secondary index component would corresponding to a partial memory component
+            // We need this to guarantee the correctness of component Id acceleration for
+            // secondary-to-primary index.
+            // Otherwise, the new secondary index component would correspond to a partial
+            // memory component
             // of the primary index, which is incorrect.
             if (ds.getDatasetType() == DatasetType.INTERNAL) {
                 FlushDatasetUtil.flushDataset(hcc, metadataProvider, index.getDataverseName(), index.getDatasetName());
@@ -1017,7 +1029,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             bActiveTxn = true;
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
 
-            // #. add another new index with PendingNoOp after deleting the index with PendingAddOp
+            // #. add another new index with PendingNoOp after deleting the index with
+            // PendingAddOp
             MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), index.getDataverseName(),
                     index.getDatasetName(), index.getIndexName());
             index.setPendingOp(MetadataUtil.PENDING_NO_OP);
@@ -1038,7 +1051,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             if (bActiveTxn) {
                 abort(e, e, mdTxnCtx);
             }
-            // If files index was replicated for external dataset, it should be cleaned up on NC side
+            // If files index was replicated for external dataset, it should be cleaned up
+            // on NC side
             if (filesIndexReplicated) {
                 mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                 bActiveTxn = true;
@@ -1687,7 +1701,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
     }
 
     protected boolean checkWhetherFunctionIsBeingUsed(MetadataTransactionContext ctx, String dataverseName,
-            String functionName, int arity, String currentDataverse) throws MetadataException {
+            String functionName, int arity, String currentDataverse) throws AlgebricksException {
         List<Dataverse> allDataverses = MetadataManager.INSTANCE.getDataverses(ctx);
         for (Dataverse dataverse : allDataverses) {
             if (currentDataverse != null && dataverse.getDataverseName().equals(currentDataverse)) {
@@ -1876,7 +1890,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             MetadataProvider metadataProvider, InsertStatement insertUpsert)
             throws RemoteException, AlgebricksException, ACIDException {
 
-        // Insert/upsert statement rewriting (happens under the same ongoing metadata transaction)
+        // Insert/upsert statement rewriting (happens under the same ongoing metadata
+        // transaction)
         Pair<IReturningStatement, Integer> rewrittenResult =
                 apiFramework.reWriteQuery(declaredFunctions, metadataProvider, insertUpsert, sessionOutput);
 
@@ -1898,7 +1913,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             default:
                 throw new AlgebricksException("Unsupported statement type " + rewrittenInsertUpsert.getKind());
         }
-        // Insert/upsert statement compilation (happens under the same ongoing metadata transaction)
+        // Insert/upsert statement compilation (happens under the same ongoing metadata
+        // transaction)
         return apiFramework.compileQuery(clusterInfoCollector, metadataProvider, rewrittenInsertUpsert.getQuery(),
                 rewrittenResult.second, datasetName, sessionOutput, clfrqs);
     }
@@ -2627,7 +2643,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             // all index updates has completed successfully, record transaction state
             spec = ExternalIndexingOperations.buildCommitJob(ds, indexes, metadataProvider);
 
-            // Aquire write latch again -> start a transaction and record the decision to commit
+            // Acquire write latch again -> start a transaction and record the decision to
+            // commit
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
             bActiveTxn = true;
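
Beyond the exception change, this file mostly picks up reflowed comments. One
of them, the thread-name block near the top of the diff, describes a debugging
aid worth spelling out: tagging the worker thread while a request runs makes a
stuck HTTP request findable in a thread dump. A sketch of the pattern; the
restore-in-finally step is an assumption, since the hunk only shows the
setName call:

    // Tag the current thread for the duration of a task, then restore the old
    // name so pooled threads are not mislabeled afterwards.
    public final class ThreadNameExample {
        public static void runWithThreadName(String label, Runnable work) {
            String previous = Thread.currentThread().getName();
            Thread.currentThread().setName(label);
            try {
                work.run();
            } finally {
                Thread.currentThread().setName(previous);
            }
        }

        public static void main(String[] args) {
            runWithThreadName("QueryTranslator", () ->
                    System.out.println("running as " + Thread.currentThread().getName()));
        }
    }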

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
index 6ac9436..1ca9316 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
@@ -33,6 +33,7 @@ import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
@@ -65,14 +66,16 @@ public class FunctionUtil {
     }
 
     /**
-     * Retrieve stored functions (from CREATE FUNCTION statements) that have been used in an expression.
+     * Retrieve stored functions (from CREATE FUNCTION statements) that have been
+     * used in an expression.
      *
      * @param metadataProvider,
      *            the metadata provider
      * @param expression,
      *            the expression for analysis
      * @param declaredFunctions,
-     *            a set of declared functions in the query, which can potentially override stored functions.
+     *            a set of declared functions in the query, which can potentially
+     *            override stored functions.
      * @param functionCollector,
      *            for collecting function calls in the <code>expression</code>
      * @param functionParser,
@@ -85,8 +88,8 @@ public class FunctionUtil {
             Expression expression, List<FunctionSignature> declaredFunctions, List<FunctionDecl> inputFunctionDecls,
             IFunctionCollector functionCollector, IFunctionParser functionParser,
             IFunctionNormalizer functionNormalizer) throws CompilationException {
-        List<FunctionDecl> functionDecls = inputFunctionDecls == null ? new ArrayList<>()
-                : new ArrayList<>(inputFunctionDecls);
+        List<FunctionDecl> functionDecls =
+                inputFunctionDecls == null ? new ArrayList<>() : new ArrayList<>(inputFunctionDecls);
         if (expression == null) {
             return functionDecls;
         }
@@ -102,13 +105,22 @@ public class FunctionUtil {
             }
             String namespace = signature.getNamespace();
             // Checks the existence of the referred dataverse.
-            if (!namespace.equals(FunctionConstants.ASTERIX_NS)
-                    && !namespace.equals(AlgebricksBuiltinFunctions.ALGEBRICKS_NS)
-                    && metadataProvider.findDataverse(namespace) == null) {
-                throw new CompilationException("In function call \"" + namespace + "." + signature.getName()
-                        + "(...)\", the dataverse \"" + namespace + "\" cannot be found!");
+            try {
+                if (!namespace.equals(FunctionConstants.ASTERIX_NS)
+                        && !namespace.equals(AlgebricksBuiltinFunctions.ALGEBRICKS_NS)
+                        && metadataProvider.findDataverse(namespace) == null) {
+                    throw new CompilationException("In function call \"" + namespace + "." + signature.getName()
+                            + "(...)\", the dataverse \"" + namespace + "\" cannot be found!");
+                }
+            } catch (AlgebricksException e) {
+                throw new CompilationException(e);
+            }
+            Function function;
+            try {
+                function = lookupUserDefinedFunctionDecl(metadataProvider.getMetadataTxnContext(), signature);
+            } catch (AlgebricksException e) {
+                throw new CompilationException(e);
             }
-            Function function = lookupUserDefinedFunctionDecl(metadataProvider.getMetadataTxnContext(), signature);
             if (function == null) {
                 FunctionSignature normalizedSignature = functionNormalizer == null ? signature
                         : functionNormalizer.normalizeBuiltinFunctionSignature(signature);
@@ -144,7 +156,7 @@ public class FunctionUtil {
     }
 
     private static Function lookupUserDefinedFunctionDecl(MetadataTransactionContext mdTxnCtx,
-            FunctionSignature signature) throws CompilationException {
+            FunctionSignature signature) throws AlgebricksException {
         if (signature.getNamespace() == null) {
             return null;
         }
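
The MetadataManager hunks that follow all share one detail: only the throws
clauses change, while the bodies keep wrapping RMI failures in
MetadataException. That compiles because a body may throw any subtype of its
declared exception. A sketch, reusing the stand-in classes from the first
example:

    class MetadataManagerSketch {
        void addDataverse(String name) throws AlgebricksException {
            try {
                remoteCall(name);
            } catch (java.rmi.RemoteException e) {
                // The narrower MetadataException subtype is still legal under
                // the widened AlgebricksException clause.
                throw new MetadataException(e);
            }
        }

        private void remoteCall(String name) throws java.rmi.RemoteException {
            // stand-in for the RMI hop to the metadata node
        }
    }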

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index 23e6fb0..2e872fc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -54,6 +54,7 @@ import org.apache.asterix.metadata.entities.Node;
 import org.apache.asterix.metadata.entities.NodeGroup;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 /**
@@ -95,7 +96,8 @@ public class MetadataManager implements IMetadataManager {
     private final ReadWriteLock metadataLatch;
     protected boolean rebindMetadataNode = false;
 
-    // TODO(mblow): replace references of this (non-constant) field with a method, update field name accordingly
+    // TODO(mblow): replace references of this (non-constant) field with a method,
+    // update field name accordingly
     public static IMetadataManager INSTANCE;
 
     private MetadataManager(IAsterixStateProxy proxy, IMetadataNode metadataNode) {
@@ -148,7 +150,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addDataverse(MetadataTransactionContext ctx, Dataverse dataverse) throws MetadataException {
+    public void addDataverse(MetadataTransactionContext ctx, Dataverse dataverse) throws AlgebricksException {
         try {
             metadataNode.addDataverse(ctx.getJobId(), dataverse);
         } catch (RemoteException e) {
@@ -158,7 +160,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void dropDataverse(MetadataTransactionContext ctx, String dataverseName) throws MetadataException {
+    public void dropDataverse(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException {
         try {
             metadataNode.dropDataverse(ctx.getJobId(), dataverseName);
         } catch (RemoteException e) {
@@ -168,7 +170,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public List<Dataverse> getDataverses(MetadataTransactionContext ctx) throws MetadataException {
+    public List<Dataverse> getDataverses(MetadataTransactionContext ctx) throws AlgebricksException {
         try {
             return metadataNode.getDataverses(ctx.getJobId());
         } catch (RemoteException e) {
@@ -177,7 +179,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public Dataverse getDataverse(MetadataTransactionContext ctx, String dataverseName) throws MetadataException {
+    public Dataverse getDataverse(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException {
         // First look in the context to see if this transaction created the
         // requested dataverse itself (but the dataverse is still uncommitted).
         Dataverse dataverse = ctx.getDataverse(dataverseName);
@@ -211,7 +213,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, String dataverseName)
-            throws MetadataException {
+            throws AlgebricksException {
         List<Dataset> dataverseDatasets = new ArrayList<>();
         // add uncommitted temporary datasets
         for (Dataset dataset : ctx.getDataverseDatasets(dataverseName)) {
@@ -238,7 +240,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addDataset(MetadataTransactionContext ctx, Dataset dataset) throws MetadataException {
+    public void addDataset(MetadataTransactionContext ctx, Dataset dataset) throws AlgebricksException {
         // add dataset into metadataNode
         if (!dataset.getDatasetDetails().isTemp()) {
             try {
@@ -254,7 +256,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
-            throws MetadataException {
+            throws AlgebricksException {
         Dataset dataset = findDataset(ctx, dataverseName, datasetName);
         // If a dataset is not in the cache, then it could not be a temp dataset
         if (dataset == null || !dataset.getDatasetDetails().isTemp()) {
@@ -271,7 +273,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public Dataset getDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
-            throws MetadataException {
+            throws AlgebricksException {
 
         // First look in the context to see if this transaction created the
         // requested dataset itself (but the dataset is still uncommitted).
@@ -307,7 +309,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<Index> getDatasetIndexes(MetadataTransactionContext ctx, String dataverseName, String datasetName)
-            throws MetadataException {
+            throws AlgebricksException {
         List<Index> datasetIndexes = new ArrayList<>();
         Dataset dataset = findDataset(ctx, dataverseName, datasetName);
         if (dataset == null) {
@@ -329,7 +331,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void addCompactionPolicy(MetadataTransactionContext mdTxnCtx, CompactionPolicy compactionPolicy)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.addCompactionPolicy(mdTxnCtx.getJobId(), compactionPolicy);
         } catch (RemoteException e) {
@@ -340,7 +342,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public CompactionPolicy getCompactionPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
-            throws MetadataException {
+            throws AlgebricksException {
 
         CompactionPolicy compactionPolicy;
         try {
@@ -352,7 +354,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addDatatype(MetadataTransactionContext ctx, Datatype datatype) throws MetadataException {
+    public void addDatatype(MetadataTransactionContext ctx, Datatype datatype) throws AlgebricksException {
         try {
             metadataNode.addDatatype(ctx.getJobId(), datatype);
         } catch (RemoteException e) {
@@ -368,7 +370,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.dropDatatype(ctx.getJobId(), dataverseName, datatypeName);
         } catch (RemoteException e) {
@@ -379,7 +381,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public Datatype getDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
-            throws MetadataException {
+            throws AlgebricksException {
         // First look in the context to see if this transaction created the
         // requested datatype itself (but the datatype is still uncommitted).
         Datatype datatype = ctx.getDatatype(dataverseName, datatypeName);
@@ -397,9 +399,9 @@ public class MetadataManager implements IMetadataManager {
         datatype = cache.getDatatype(dataverseName, datatypeName);
         if (datatype != null) {
             // Datatype is already in the cache, don't add it again.
-            //create a new Datatype object with a new ARecordType object in order to avoid
-            //concurrent access to UTF8StringPointable comparator in ARecordType object.
-            //see issue 510
+            // create a new Datatype object with a new ARecordType object in order to avoid
+            // concurrent access to UTF8StringPointable comparator in ARecordType object.
+            // see issue 510
             ARecordType aRecType = (ARecordType) datatype.getDatatype();
             return new Datatype(
                     datatype.getDataverseName(), datatype.getDatatypeName(), new ARecordType(aRecType.getTypeName(),
@@ -420,7 +422,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addIndex(MetadataTransactionContext ctx, Index index) throws MetadataException {
+    public void addIndex(MetadataTransactionContext ctx, Index index) throws AlgebricksException {
         String dataverseName = index.getDataverseName();
         String datasetName = index.getDatasetName();
         Dataset dataset = findDataset(ctx, dataverseName, datasetName);
@@ -435,7 +437,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addAdapter(MetadataTransactionContext mdTxnCtx, DatasourceAdapter adapter) throws MetadataException {
+    public void addAdapter(MetadataTransactionContext mdTxnCtx, DatasourceAdapter adapter) throws AlgebricksException {
         try {
             metadataNode.addAdapter(mdTxnCtx.getJobId(), adapter);
         } catch (RemoteException e) {
@@ -447,10 +449,12 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
-            throws MetadataException {
+            throws AlgebricksException {
         Dataset dataset = findDataset(ctx, dataverseName, datasetName);
-        // If a dataset is not in the cache, then it could be an unloaded persistent dataset.
-        // If the dataset is a temp dataset, then we do not need to call any MedataNode operations.
+        // If a dataset is not in the cache, then it could be an unloaded persistent
+        // dataset.
+        // If the dataset is a temp dataset, then we do not need to call any MetadataNode
+        // operations.
         if (dataset == null || !dataset.getDatasetDetails().isTemp()) {
             try {
                 metadataNode.dropIndex(ctx.getJobId(), dataverseName, datasetName, indexName);
@@ -463,7 +467,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public Index getIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
-            throws MetadataException {
+            throws AlgebricksException {
 
         // First look in the context to see if this transaction created the
         // requested index itself (but the index is still uncommitted).
@@ -499,7 +503,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addNode(MetadataTransactionContext ctx, Node node) throws MetadataException {
+    public void addNode(MetadataTransactionContext ctx, Node node) throws AlgebricksException {
         try {
             metadataNode.addNode(ctx.getJobId(), node);
         } catch (RemoteException e) {
@@ -508,7 +512,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addNodegroup(MetadataTransactionContext ctx, NodeGroup nodeGroup) throws MetadataException {
+    public void addNodegroup(MetadataTransactionContext ctx, NodeGroup nodeGroup) throws AlgebricksException {
         try {
             metadataNode.addNodeGroup(ctx.getJobId(), nodeGroup);
         } catch (RemoteException e) {
@@ -519,7 +523,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropNodegroup(MetadataTransactionContext ctx, String nodeGroupName, boolean failSilently)
-            throws MetadataException {
+            throws AlgebricksException {
         boolean dropped;
         try {
             dropped = metadataNode.dropNodegroup(ctx.getJobId(), nodeGroupName, failSilently);
@@ -532,7 +536,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public NodeGroup getNodegroup(MetadataTransactionContext ctx, String nodeGroupName) throws MetadataException {
+    public NodeGroup getNodegroup(MetadataTransactionContext ctx, String nodeGroupName) throws AlgebricksException {
         // First look in the context to see if this transaction created the
         // requested dataverse itself (but the dataverse is still uncommitted).
         NodeGroup nodeGroup = ctx.getNodeGroup(nodeGroupName);
@@ -565,7 +569,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addFunction(MetadataTransactionContext mdTxnCtx, Function function) throws MetadataException {
+    public void addFunction(MetadataTransactionContext mdTxnCtx, Function function) throws AlgebricksException {
         try {
             metadataNode.addFunction(mdTxnCtx.getJobId(), function);
         } catch (RemoteException e) {
@@ -576,7 +580,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropFunction(MetadataTransactionContext ctx, FunctionSignature functionSignature)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.dropFunction(ctx.getJobId(), functionSignature);
         } catch (RemoteException e) {
@@ -587,7 +591,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public Function getFunction(MetadataTransactionContext ctx, FunctionSignature functionSignature)
-            throws MetadataException {
+            throws AlgebricksException {
         // First look in the context to see if this transaction created the
         // requested dataset itself (but the dataset is still uncommitted).
         Function function = ctx.getFunction(functionSignature);
@@ -626,9 +630,10 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public List<Function> getFunctions(MetadataTransactionContext ctx, String dataverseName) throws MetadataException {
+    public List<Function> getFunctions(MetadataTransactionContext ctx, String dataverseName)
+            throws AlgebricksException {
         try {
-           return metadataNode.getFunctions(ctx.getJobId(), dataverseName);
+            return metadataNode.getFunctions(ctx.getJobId(), dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(e);
         }
@@ -636,7 +641,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void addFeedPolicy(MetadataTransactionContext mdTxnCtx, FeedPolicyEntity feedPolicy)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.addFeedPolicy(mdTxnCtx.getJobId(), feedPolicy);
         } catch (RemoteException e) {
@@ -646,7 +651,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException {
+    public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws AlgebricksException {
         try {
             metadataNode.initializeDatasetIdFactory(ctx.getJobId());
         } catch (RemoteException e) {
@@ -655,7 +660,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public int getMostRecentDatasetId() throws MetadataException {
+    public int getMostRecentDatasetId() throws AlgebricksException {
         try {
             return metadataNode.getMostRecentDatasetId();
         } catch (RemoteException e) {
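
This pair exists for restart: the dataset-id factory has to be re-seeded so that newly
generated ids land above everything already persisted in the metadata. A minimal
sketch of a bootstrap path using the two calls (the logger is a stand-in):

    // Seed the id factory from persisted metadata, then read back the
    // high-water mark; both calls go to the metadata node over RMI.
    MetadataManager.INSTANCE.initializeDatasetIdFactory(mdTxnCtx);
    int mostRecentId = MetadataManager.INSTANCE.getMostRecentDatasetId();
    LOGGER.info("dataset ids will be generated above " + mostRecentId);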
@@ -665,7 +670,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
-            throws MetadataException {
+            throws AlgebricksException {
         List<Function> dataverseFunctions;
         try {
             // Assuming that the transaction can read its own writes on the
@@ -681,7 +686,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.dropAdapter(ctx.getJobId(), dataverseName, name);
         } catch (RemoteException e) {
@@ -691,7 +696,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public DatasourceAdapter getAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
-            throws MetadataException {
+            throws AlgebricksException {
         DatasourceAdapter adapter;
         try {
             adapter = metadataNode.getAdapter(ctx.getJobId(), dataverseName, name);
@@ -703,7 +708,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.dropLibrary(ctx.getJobId(), dataverseName, libraryName);
         } catch (RemoteException e) {
@@ -714,7 +719,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
-            throws MetadataException {
+            throws AlgebricksException {
         List<Library> dataverseLibraries;
         try {
             // Assuming that the transaction can read its own writes on the
@@ -729,7 +734,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addLibrary(MetadataTransactionContext ctx, Library library) throws MetadataException {
+    public void addLibrary(MetadataTransactionContext ctx, Library library) throws AlgebricksException {
         try {
             metadataNode.addLibrary(ctx.getJobId(), library);
         } catch (RemoteException e) {
@@ -740,7 +745,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
-            throws MetadataException, RemoteException {
+            throws AlgebricksException, RemoteException {
         Library library;
         try {
             library = metadataNode.getLibrary(ctx.getJobId(), dataverseName, libraryName);
@@ -772,7 +777,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
-            throws MetadataException {
+            throws AlgebricksException {
 
         FeedPolicyEntity feedPolicy;
         try {
@@ -784,7 +789,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException {
+    public Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws AlgebricksException {
         Feed feed;
         try {
             feed = metadataNode.getFeed(ctx.getJobId(), dataverse, feedName);
@@ -795,7 +800,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public List<Feed> getFeeds(MetadataTransactionContext ctx, String dataverse) throws MetadataException {
+    public List<Feed> getFeeds(MetadataTransactionContext ctx, String dataverse) throws AlgebricksException {
         List<Feed> feeds;
         try {
             feeds = metadataNode.getFeeds(ctx.getJobId(), dataverse);
@@ -806,7 +811,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException {
+    public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws AlgebricksException {
         Feed feed = null;
         List<FeedConnection> feedConnections = null;
         try {
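
The hunk cuts off after the declarations, but the shape of dropFeed is: fetch the feed
and its connections first, then drop them together so no dangling FeedConnection
records outlive the feed. A plausible reconstruction of the truncated body, inside the
usual RemoteException-wrapping try block (getDatasetName on FeedConnection is an
assumption):

    feed = metadataNode.getFeed(ctx.getJobId(), dataverse, feedName);
    feedConnections = metadataNode.getFeedConnections(ctx.getJobId(), dataverse, feedName);
    metadataNode.dropFeed(ctx.getJobId(), dataverse, feedName);
    for (FeedConnection feedConnection : feedConnections) {
        metadataNode.dropFeedConnection(ctx.getJobId(), dataverse, feedName,
                feedConnection.getDatasetName());
    }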
@@ -824,7 +829,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addFeed(MetadataTransactionContext ctx, Feed feed) throws MetadataException {
+    public void addFeed(MetadataTransactionContext ctx, Feed feed) throws AlgebricksException {
         try {
             metadataNode.addFeed(ctx.getJobId(), feed);
         } catch (RemoteException e) {
@@ -835,7 +840,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void addFeedConnection(MetadataTransactionContext ctx, FeedConnection feedConnection)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.addFeedConnection(ctx.getJobId(), feedConnection);
         } catch (RemoteException e) {
@@ -846,7 +851,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropFeedConnection(MetadataTransactionContext ctx, String dataverseName, String feedName,
-            String datasetName) throws MetadataException {
+            String datasetName) throws AlgebricksException {
         try {
             metadataNode.dropFeedConnection(ctx.getJobId(), dataverseName, feedName, datasetName);
         } catch (RemoteException e) {
@@ -857,7 +862,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public FeedConnection getFeedConnection(MetadataTransactionContext ctx, String dataverseName, String feedName,
-            String datasetName) throws MetadataException {
+            String datasetName) throws AlgebricksException {
         try {
             return metadataNode.getFeedConnection(ctx.getJobId(), dataverseName, feedName, datasetName);
         } catch (RemoteException e) {
@@ -867,7 +872,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, String dataverseName, String feedName)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             return metadataNode.getFeedConnections(ctx.getJobId(), dataverseName, feedName);
         } catch (RemoteException e) {
@@ -877,7 +882,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext mdTxnCtx, String dataverse)
-            throws MetadataException {
+            throws AlgebricksException {
         List<DatasourceAdapter> dataverseAdapters;
         try {
             dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getJobId(), dataverse);
@@ -889,7 +894,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public void dropFeedPolicy(MetadataTransactionContext mdTxnCtx, String dataverseName, String policyName)
-            throws MetadataException {
+            throws AlgebricksException {
         FeedPolicyEntity feedPolicy;
         try {
             feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
@@ -901,7 +906,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     public List<FeedPolicyEntity> getDataversePolicies(MetadataTransactionContext mdTxnCtx, String dataverse)
-            throws MetadataException {
+            throws AlgebricksException {
         List<FeedPolicyEntity> dataverseFeedPolicies;
         try {
             dataverseFeedPolicies = metadataNode.getDataversePolicies(mdTxnCtx.getJobId(), dataverse);
@@ -913,7 +918,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public List<ExternalFile> getDatasetExternalFiles(MetadataTransactionContext mdTxnCtx, Dataset dataset)
-            throws MetadataException {
+            throws AlgebricksException {
         List<ExternalFile> externalFiles;
         try {
             externalFiles = metadataNode.getExternalFiles(mdTxnCtx.getJobId(), dataset);
@@ -924,7 +929,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addExternalFile(MetadataTransactionContext ctx, ExternalFile externalFile) throws MetadataException {
+    public void addExternalFile(MetadataTransactionContext ctx, ExternalFile externalFile) throws AlgebricksException {
         try {
             metadataNode.addExternalFile(ctx.getJobId(), externalFile);
         } catch (RemoteException e) {
@@ -933,7 +938,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void dropExternalFile(MetadataTransactionContext ctx, ExternalFile externalFile) throws MetadataException {
+    public void dropExternalFile(MetadataTransactionContext ctx, ExternalFile externalFile) throws AlgebricksException {
         try {
             metadataNode.dropExternalFile(ctx.getJobId(), externalFile.getDataverseName(),
                     externalFile.getDatasetName(), externalFile.getFileNumber());
@@ -944,7 +949,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public ExternalFile getExternalFile(MetadataTransactionContext ctx, String dataverseName, String datasetName,
-            Integer fileNumber) throws MetadataException {
+            Integer fileNumber) throws AlgebricksException {
         ExternalFile file;
         try {
             file = metadataNode.getExternalFile(ctx.getJobId(), dataverseName, datasetName, fileNumber);
@@ -954,10 +959,10 @@ public class MetadataManager implements IMetadataManager {
         return file;
     }
 
-    //TODO: Optimize <-- use keys instead of object -->
+    // TODO: Optimize -- pass the dataset's key fields instead of the whole object.
     @Override
     public void dropDatasetExternalFiles(MetadataTransactionContext mdTxnCtx, Dataset dataset)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.dropExternalFiles(mdTxnCtx.getJobId(), dataset);
         } catch (RemoteException e) {
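
The TODO above is asking for the drop to be keyed the way getExternalFile already is.
A hypothetical signature for that optimization, not part of this commit:

    // Hypothetical: pass the dataset's key fields instead of the whole object.
    void dropDatasetExternalFiles(MetadataTransactionContext mdTxnCtx,
            String dataverseName, String datasetName) throws AlgebricksException;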
@@ -966,7 +971,7 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void updateDataset(MetadataTransactionContext ctx, Dataset dataset) throws MetadataException {
+    public void updateDataset(MetadataTransactionContext ctx, Dataset dataset) throws AlgebricksException {
         try {
             metadataNode.updateDataset(ctx.getJobId(), dataset);
         } catch (RemoteException e) {
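
updateDataset is the rewrite path used when a dataset's stored metadata changes, for
example clearing a pending-op flag after a DDL operation completes. A sketch of a
typical caller; the copy constructor and MetadataUtil.PENDING_NO_OP are assumptions
about the entity API:

    Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
    // Assumed copy-with-new-pending-op constructor; the real entity may differ.
    Dataset updated = new Dataset(ds, MetadataUtil.PENDING_NO_OP);
    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, updated);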
@@ -992,7 +997,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public <T extends IExtensionMetadataEntity> void addEntity(MetadataTransactionContext mdTxnCtx, T entity)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.addEntity(mdTxnCtx.getJobId(), entity);
         } catch (RemoteException e) {
@@ -1002,7 +1007,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public <T extends IExtensionMetadataEntity> void upsertEntity(MetadataTransactionContext mdTxnCtx, T entity)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.upsertEntity(mdTxnCtx.getJobId(), entity);
         } catch (RemoteException e) {
@@ -1012,7 +1017,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public <T extends IExtensionMetadataEntity> void deleteEntity(MetadataTransactionContext mdTxnCtx, T entity)
-            throws MetadataException {
+            throws AlgebricksException {
         try {
             metadataNode.deleteEntity(mdTxnCtx.getJobId(), entity);
         } catch (RemoteException e) {
@@ -1022,7 +1027,7 @@ public class MetadataManager implements IMetadataManager {
 
     @Override
     public <T extends IExtensionMetadataEntity> List<T> getEntities(MetadataTransactionContext mdTxnCtx,
-            IExtensionMetadataSearchKey searchKey) throws MetadataException {
+            IExtensionMetadataSearchKey searchKey) throws AlgebricksException {
         try {
             return metadataNode.getEntities(mdTxnCtx.getJobId(), searchKey);
         } catch (RemoteException e) {
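
The four generic methods above (addEntity, upsertEntity, deleteEntity, getEntities)
are the extension point for metadata entities the core does not know about: an
extension supplies an IExtensionMetadataEntity implementation plus an
IExtensionMetadataSearchKey for lookups. A hedged sketch with invented
ExampleEntity/ExampleSearchKey types:

    ExampleEntity entity = new ExampleEntity("sample"); // implements IExtensionMetadataEntity
    MetadataManager.INSTANCE.addEntity(mdTxnCtx, entity);       // insert
    MetadataManager.INSTANCE.upsertEntity(mdTxnCtx, entity);    // insert or replace
    List<ExampleEntity> hits =
            MetadataManager.INSTANCE.getEntities(mdTxnCtx, new ExampleSearchKey("sample"));
    MetadataManager.INSTANCE.deleteEntity(mdTxnCtx, entity);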