Posted to commits@asterixdb.apache.org by im...@apache.org on 2015/08/25 18:44:12 UTC

[24/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
new file mode 100644
index 0000000..aa7a6ac
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+
+/**
+ * Operator subtree that matches the following patterns, and provides convenient access to its nodes:
+ * (select)? <-- (assign | unnest)* <-- (datasource scan | unnest-map)
+ */
+public class OptimizableOperatorSubTree {
+
+    public static enum DataSourceType {
+        DATASOURCE_SCAN,
+        EXTERNAL_SCAN,
+        PRIMARY_INDEX_LOOKUP,
+        COLLECTION_SCAN,
+        NO_DATASOURCE
+    }
+
+    public ILogicalOperator root = null;
+    public Mutable<ILogicalOperator> rootRef = null;
+    public final List<Mutable<ILogicalOperator>> assignsAndUnnestsRefs = new ArrayList<Mutable<ILogicalOperator>>();
+    public final List<AbstractLogicalOperator> assignsAndUnnests = new ArrayList<AbstractLogicalOperator>();
+    public Mutable<ILogicalOperator> dataSourceRef = null;
+    public DataSourceType dataSourceType = DataSourceType.NO_DATASOURCE;
+    // Dataset and type metadata. Set in setDatasetAndTypeMetadata().
+    public Dataset dataset = null;
+    public ARecordType recordType = null;
+
+    public boolean initFromSubTree(Mutable<ILogicalOperator> subTreeOpRef) {
+        reset();
+        rootRef = subTreeOpRef;
+        root = subTreeOpRef.getValue();
+        // Examine the op's children to match the expected patterns.
+        AbstractLogicalOperator subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+        do {
+            // Skip select operator.
+            if (subTreeOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
+                subTreeOpRef = subTreeOp.getInputs().get(0);
+                subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+            }
+            // Check primary-index pattern.
+            if (subTreeOp.getOperatorTag() != LogicalOperatorTag.ASSIGN
+                    && subTreeOp.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+                // Pattern may still match if we are looking for primary index matches as well.
+                return initializeDataSource(subTreeOpRef);
+            }
+            // Match (assign | unnest)+.
+            while (subTreeOp.getOperatorTag() == LogicalOperatorTag.ASSIGN
+                    || subTreeOp.getOperatorTag() == LogicalOperatorTag.UNNEST) {
+                assignsAndUnnestsRefs.add(subTreeOpRef);
+                assignsAndUnnests.add(subTreeOp);
+
+                subTreeOpRef = subTreeOp.getInputs().get(0);
+                subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+            }
+        } while (subTreeOp.getOperatorTag() == LogicalOperatorTag.SELECT);
+
+        // Match data source (datasource scan or primary index search).
+        return initializeDataSource(subTreeOpRef);
+    }
+
+    private boolean initializeDataSource(Mutable<ILogicalOperator> subTreeOpRef) {
+        AbstractLogicalOperator subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+        if (subTreeOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+            dataSourceType = DataSourceType.DATASOURCE_SCAN;
+            dataSourceRef = subTreeOpRef;
+            return true;
+        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.EXTERNAL_LOOKUP) {
+            dataSourceType = DataSourceType.EXTERNAL_SCAN;
+            dataSourceRef = subTreeOpRef;
+            return true;
+        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE) {
+            dataSourceType = DataSourceType.COLLECTION_SCAN;
+            dataSourceRef = subTreeOpRef;
+            return true;
+        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+            UnnestMapOperator unnestMapOp = (UnnestMapOperator) subTreeOp;
+            ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+            if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                    jobGenParams.readFromFuncArgs(f.getArguments());
+                    if (jobGenParams.isPrimaryIndex()) {
+                        dataSourceType = DataSourceType.PRIMARY_INDEX_LOOKUP;
+                        dataSourceRef = subTreeOpRef;
+                        return true;
+                    }
+                }
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Finds the dataset corresponding to the datasource scan in the metadata.
+     * Also sets recordType to the record type of that dataset.
+     */
+    public boolean setDatasetAndTypeMetadata(AqlMetadataProvider metadataProvider) throws AlgebricksException {
+        String dataverseName = null;
+        String datasetName = null;
+        switch (dataSourceType) {
+            case DATASOURCE_SCAN:
+                DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) dataSourceRef.getValue();
+                Pair<String, String> datasetInfo = AnalysisUtil.getDatasetInfo(dataSourceScan);
+                dataverseName = datasetInfo.first;
+                datasetName = datasetInfo.second;
+                break;
+            case PRIMARY_INDEX_LOOKUP:
+                AbstractUnnestOperator unnestMapOp = (AbstractUnnestOperator) dataSourceRef.getValue();
+                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                jobGenParams.readFromFuncArgs(f.getArguments());
+                datasetName = jobGenParams.getDatasetName();
+                dataverseName = jobGenParams.getDataverseName();
+                break;
+            case EXTERNAL_SCAN:
+                ExternalDataLookupOperator externalScan = (ExternalDataLookupOperator) dataSourceRef.getValue();
+                datasetInfo = AnalysisUtil.getDatasetInfo(externalScan);
+                dataverseName = datasetInfo.first;
+                datasetName = datasetInfo.second;
+                break;
+            case COLLECTION_SCAN:
+                return true;
+            case NO_DATASOURCE:
+            default:
+                return false;
+        }
+        if (dataverseName == null || datasetName == null) {
+            return false;
+        }
+        // Find the dataset corresponding to the datasource in the metadata.
+        dataset = metadataProvider.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AlgebricksException("No metadata for dataset " + datasetName);
+        }
+        // Get the record type for that dataset.
+        IAType itemType = metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+        if (itemType.getTypeTag() != ATypeTag.RECORD) {
+            return false;
+        }
+        recordType = (ARecordType) itemType;
+        return true;
+    }
+
+    public boolean hasDataSource() {
+        return dataSourceType != DataSourceType.NO_DATASOURCE;
+    }
+
+    public boolean hasDataSourceScan() {
+        return dataSourceType == DataSourceType.DATASOURCE_SCAN;
+    }
+
+    public void reset() {
+        root = null;
+        rootRef = null;
+        assignsAndUnnestsRefs.clear();
+        assignsAndUnnests.clear();
+        dataSourceRef = null;
+        dataSourceType = DataSourceType.NO_DATASOURCE;
+        dataset = null;
+        recordType = null;
+    }
+
+    public void getPrimaryKeyVars(List<LogicalVariable> target) throws AlgebricksException {
+        switch (dataSourceType) {
+            case DATASOURCE_SCAN:
+                DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) dataSourceRef.getValue();
+                int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+                for (int i = 0; i < numPrimaryKeys; i++) {
+                    target.add(dataSourceScan.getVariables().get(i));
+                }
+                break;
+            case PRIMARY_INDEX_LOOKUP:
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) dataSourceRef.getValue();
+                List<LogicalVariable> primaryKeys = null;
+                primaryKeys = AccessMethodUtils.getPrimaryKeyVarsFromPrimaryUnnestMap(dataset, unnestMapOp);
+                target.addAll(primaryKeys);
+                break;
+            case NO_DATASOURCE:
+            default:
+                throw new AlgebricksException("The subtree does not have any data source.");
+        }
+    }
+
+    public List<LogicalVariable> getDataSourceVariables() throws AlgebricksException {
+        switch (dataSourceType) {
+            case DATASOURCE_SCAN:
+            case EXTERNAL_SCAN:
+            case PRIMARY_INDEX_LOOKUP:
+                AbstractScanOperator scanOp = (AbstractScanOperator) dataSourceRef.getValue();
+                return scanOp.getVariables();
+            case COLLECTION_SCAN:
+                return new ArrayList<LogicalVariable>();
+            case NO_DATASOURCE:
+            default:
+                throw new AlgebricksException("The subtree does not have any data source.");
+        }
+    }
+}
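
For illustration only (not part of this diff): a minimal sketch of how an access-method rewrite rule might drive the subtree helper above. The wrapper class, opRef, and metadataProvider below are hypothetical; the OptimizableOperatorSubTree calls mirror the API shown in the diff.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.optimizer.rules.am.OptimizableOperatorSubTree;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

public class SubTreeUsageSketch {
    // Returns the primary-key variables of the datasource scan under opRef, or an empty list
    // if the subtree does not match the (select)? <-- (assign | unnest)* <-- (scan) pattern.
    public static List<LogicalVariable> primaryKeysOf(Mutable<ILogicalOperator> opRef,
            AqlMetadataProvider metadataProvider) throws AlgebricksException {
        OptimizableOperatorSubTree subTree = new OptimizableOperatorSubTree();
        List<LogicalVariable> primaryKeys = new ArrayList<LogicalVariable>();
        if (!subTree.initFromSubTree(opRef) || !subTree.setDatasetAndTypeMetadata(metadataProvider)) {
            return primaryKeys;
        }
        if (subTree.hasDataSourceScan()) {
            subTree.getPrimaryKeyVars(primaryKeys);
        }
        return primaryKeys;
    }
}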

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
new file mode 100644
index 0000000..e2dffd1
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+
+/**
+ * Class for helping rewrite rules to choose and apply RTree indexes.
+ */
+public class RTreeAccessMethod implements IAccessMethod {
+
+    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
+    static {
+        funcIdents.add(AsterixBuiltinFunctions.SPATIAL_INTERSECT);
+    }
+
+    public static RTreeAccessMethod INSTANCE = new RTreeAccessMethod();
+
+    @Override
+    public List<FunctionIdentifier> getOptimizableFunctions() {
+        return funcIdents;
+    }
+
+    @Override
+    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
+        boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
+        if (!matches) {
+            matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
+        }
+        return matches;
+    }
+
+    @Override
+    public boolean matchAllIndexExprs() {
+        return true;
+    }
+
+    @Override
+    public boolean matchPrefixIndexExprs() {
+        return false;
+    }
+
+    @Override
+    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
+            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
+            IOptimizationContext context) throws AlgebricksException {
+        // TODO: We can probably do something smarter here based on selectivity or MBR area.
+        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
+        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(subTree, null, chosenIndex, optFuncExpr,
+                analysisCtx, false, false, false, context);
+        if (primaryIndexUnnestOp == null) {
+            return false;
+        }
+        // Replace the datasource scan with the new plan rooted at primaryIndexUnnestMap.
+        subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+        return true;
+    }
+
+    @Override
+    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
+            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
+            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
+            boolean hasGroupBy) throws AlgebricksException {
+        // Determine if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
+        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
+        // Determine probe and index subtrees based on chosen index.
+        OptimizableOperatorSubTree indexSubTree = null;
+        OptimizableOperatorSubTree probeSubTree = null;
+        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
+            indexSubTree = leftSubTree;
+            probeSubTree = rightSubTree;
+        } else if (rightSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
+            indexSubTree = rightSubTree;
+            probeSubTree = leftSubTree;
+        }
+        if (indexSubTree == null) {
+            // This may happen in the left-outer-join case.
+            return false;
+        }
+
+        LogicalVariable newNullPlaceHolderVar = null;
+        if (isLeftOuterJoin) {
+            // Get a new null placeholder variable that is the first field variable of the primary key
+            // from the indexSubTree's datasource scan op.
+            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
+        }
+
+        // TODO: We can probably do something smarter here based on selectivity or MBR area.
+        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
+        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(indexSubTree, probeSubTree, chosenIndex,
+                optFuncExpr, analysisCtx, true, isLeftOuterJoin, true, context);
+        if (primaryIndexUnnestOp == null) {
+            return false;
+        }
+
+        if (isLeftOuterJoin && hasGroupBy) {
+            // Reset the null placeholder variable.
+            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
+        }
+
+        indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+        // Change join into a select with the same condition.
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
+        SelectOperator topSelect = new SelectOperator(joinOp.getCondition(), isLeftOuterJoin, newNullPlaceHolderVar);
+        topSelect.getInputs().add(indexSubTree.rootRef);
+        topSelect.setExecutionMode(ExecutionMode.LOCAL);
+        context.computeAndSetTypeEnvironmentForOperator(topSelect);
+        // Replace the original join with the new subtree rooted at the select op.
+        joinRef.setValue(topSelect);
+        return true;
+    }
+
+    private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree,
+            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, IOptimizableFuncExpr optFuncExpr,
+            AccessMethodAnalysisContext analysisCtx, boolean retainInput, boolean retainNull,
+            boolean requiresBroadcast, IOptimizationContext context) throws AlgebricksException {
+        Dataset dataset = indexSubTree.dataset;
+        ARecordType recordType = indexSubTree.recordType;
+
+        int optFieldIdx = AccessMethodUtils.chooseFirstOptFuncVar(chosenIndex, analysisCtx);
+        Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(optFuncExpr.getFieldType(optFieldIdx),
+                optFuncExpr.getFieldName(optFieldIdx), recordType);
+        if (keyPairType == null) {
+            return null;
+        }
+
+        // Get the number of dimensions corresponding to the field indexed by chosenIndex.
+        IAType spatialType = keyPairType.first;
+        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+        int numSecondaryKeys = numDimensions * 2;
+        // We have already ensured that indexSubTree has a datasource scan.
+        AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.dataSourceRef.getValue();
+        RTreeJobGenParams jobGenParams = new RTreeJobGenParams(chosenIndex.getIndexName(), IndexType.RTREE,
+                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
+        // A spatial object is serialized in the constant of the func expr we are optimizing.
+        // The R-Tree expects as input an MBR represented with 1 field per dimension.
+        // Here we generate vars and funcs for extracting MBR fields from the constant into fields of a tuple (as the R-Tree expects them).
+        // List of variables for the assign.
+        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
+        // List of expressions for the assign.
+        ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+        Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr,
+                indexSubTree, probeSubTree);
+        ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
+
+        for (int i = 0; i < numSecondaryKeys; i++) {
+            // The create MBR function "extracts" one field of an MBR around the given spatial object.
+            AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
+            // Spatial object is the constant from the func expr we are optimizing.
+            createMBR.getArguments().add(new MutableObject<ILogicalExpression>(searchKeyExpr));
+            // The number of dimensions.
+            createMBR.getArguments().add(
+                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(
+                            numDimensions)))));
+            // Which part of the MBR to extract.
+            createMBR.getArguments().add(
+                    new MutableObject<ILogicalExpression>(new ConstantExpression(
+                            new AsterixConstantValue(new AInt32(i)))));
+            // Add a variable and its expr to the lists which will be passed into an assign op.
+            LogicalVariable keyVar = context.newVar();
+            keyVarList.add(keyVar);
+            keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
+        }
+        jobGenParams.setKeyVarList(keyVarList);
+
+        // Assign operator that "extracts" the MBR fields from the func-expr constant into a tuple.
+        AssignOperator assignSearchKeys = new AssignOperator(keyVarList, keyExprList);
+        if (probeSubTree == null) {
+            // We are optimizing a selection query.
+            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
+            assignSearchKeys.getInputs().add(dataSourceOp.getInputs().get(0));
+            assignSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
+        } else {
+            // We are optimizing a join; place the assign op on top of the probe subtree.
+            assignSearchKeys.getInputs().add(probeSubTree.rootRef);
+        }
+
+        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
+                chosenIndex, assignSearchKeys, jobGenParams, context, false, retainInput);
+
+        // Generate the rest of the upstream plan which feeds the search results into the primary index.
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            ExternalDataLookupOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(
+                    dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, chosenIndex, retainInput,
+                    retainNull);
+            return externalDataAccessOp;
+        } else {
+            UnnestMapOperator primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp,
+                    dataset, recordType, secondaryIndexUnnestOp, context, true, retainInput, false, false);
+
+            return primaryIndexUnnestOp;
+        }
+    }
+
+    @Override
+    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        if (optFuncExpr.getFuncExpr().getAnnotations()
+                .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+            return false;
+        }
+        // No additional analysis required.
+        return true;
+    }
+}
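
For illustration only (not part of this diff): a sketch of the MBR key expressions that createSecondaryToPrimaryPlan assembles for the assign operator, i.e. create-mbr(searchKeyExpr, numDimensions, i) for i in [0, 2 * numDimensions); for a 2-D spatial key this yields four key expressions. The wrapper class and method below are hypothetical; the expression construction mirrors the loop in the diff.

import java.util.ArrayList;

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

import edu.uci.ics.asterix.aql.util.FunctionUtils;
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

public class MbrKeyExprSketch {
    // Builds create-mbr(searchKeyExpr, numDimensions, i) for i in [0, 2 * numDimensions),
    // i.e. the expressions assigned to the secondary-key variables of the R-Tree search.
    public static ArrayList<Mutable<ILogicalExpression>> buildMbrKeyExprs(ILogicalExpression searchKeyExpr,
            int numDimensions) {
        int numSecondaryKeys = numDimensions * 2;
        ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
        for (int i = 0; i < numSecondaryKeys; i++) {
            AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
            // Arguments: the spatial object, the number of dimensions, and which MBR field to extract.
            createMBR.getArguments().add(new MutableObject<ILogicalExpression>(searchKeyExpr));
            createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
                    new ConstantExpression(new AsterixConstantValue(new AInt32(numDimensions)))));
            createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
                    new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
            keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
        }
        return keyExprList;
    }
}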

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
new file mode 100644
index 0000000..9b6ff5f
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+
+/**
+ * Helper class for reading and writing job-gen parameters for RTree access methods to
+ * and from a list of function arguments, typically of an unnest-map.
+ */
+public class RTreeJobGenParams extends AccessMethodJobGenParams {
+
+    protected List<LogicalVariable> keyVarList;
+
+    public RTreeJobGenParams() {
+    }
+
+    public RTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
+        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
+    }
+
+    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.writeToFuncArgs(funcArgs);
+        writeVarList(keyVarList, funcArgs);
+    }
+
+    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.readFromFuncArgs(funcArgs);
+        int index = super.getNumParams();
+        keyVarList = new ArrayList<LogicalVariable>();
+        readVarList(funcArgs, index, keyVarList);
+    }
+
+    public void setKeyVarList(List<LogicalVariable> keyVarList) {
+        this.keyVarList = keyVarList;
+    }
+
+    public List<LogicalVariable> getKeyVarList() {
+        return keyVarList;
+    }
+}
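
For illustration only (not part of this diff): a sketch of the intended round trip of R-Tree job-gen parameters through a function-argument list, assuming (as their use in the access-method framework suggests) that writeToFuncArgs and readFromFuncArgs are inverses. The index/dataset names and variable ids below are made up.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.optimizer.rules.am.RTreeJobGenParams;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

public class RTreeJobGenParamsRoundTripSketch {
    public static List<LogicalVariable> roundTrip() {
        RTreeJobGenParams written = new RTreeJobGenParams("sampleRTreeIdx", IndexType.RTREE,
                "SampleDataverse", "SampleDataset", false, false, false);
        // Four MBR key variables for a 2-D key (ids are arbitrary here).
        written.setKeyVarList(Arrays.asList(new LogicalVariable(1), new LogicalVariable(2),
                new LogicalVariable(3), new LogicalVariable(4)));
        List<Mutable<ILogicalExpression>> funcArgs = new ArrayList<Mutable<ILogicalExpression>>();
        written.writeToFuncArgs(funcArgs);

        // Parsing the same argument list back should recover the key variables.
        RTreeJobGenParams read = new RTreeJobGenParams();
        read.readFromFuncArgs(funcArgs);
        return read.getKeyVarList();
    }
}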

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
new file mode 100644
index 0000000..567d85d
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.temporal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Finds interval conditional expressions and converts them into equivalent conditions on interval start and end points.
+ * The translation exposes the condition to the Algebricks optimizer.
+ */
+public class TranslateIntervalExpressionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator selectOp = (SelectOperator) op;
+
+        Mutable<ILogicalExpression> exprRef = selectOp.getCondition();
+        boolean modified = false;
+        ILogicalExpression expr = exprRef.getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (funcExpr.getArguments().size() != 2) {
+            return false;
+        }
+        ILogicalExpression interval1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression interval2 = funcExpr.getArguments().get(1).getValue();
+        if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_MEETS)) {
+            exprRef.setValue(getEqualExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2)));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_MET_BY)) {
+            exprRef.setValue(getEqualExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2)));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_STARTS)) {
+            ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
+                    getIntervalStartExpr(interval2));
+            ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval1),
+                    getIntervalEndExpr(interval2));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_STARTED_BY)) {
+            ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
+                    getIntervalStartExpr(interval2));
+            ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval2),
+                    getIntervalEndExpr(interval1));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_ENDS)) {
+            ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
+            ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval1),
+                    getIntervalStartExpr(interval2));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_ENDED_BY)) {
+            ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
+            ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval2),
+                    getIntervalStartExpr(interval1));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_BEFORE)) {
+            // Requires a new strategy; no translation for this interval function or the remaining ones listed below.
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_AFTER)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPS)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPPED_BY)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPPING)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_COVERS)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_COVERED_BY)) {
+        }
+
+        return modified;
+    }
+
+    private ILogicalExpression getAndExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
+        return getScalarExpr(AlgebricksBuiltinFunctions.AND, arg1, arg2);
+    }
+
+    private ILogicalExpression getEqualExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
+        return getScalarExpr(AlgebricksBuiltinFunctions.EQ, arg1, arg2);
+    }
+
+    private ILogicalExpression getLessThanOrEqualExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
+        return getScalarExpr(AlgebricksBuiltinFunctions.LE, arg1, arg2);
+    }
+
+    private ILogicalExpression getIntervalStartExpr(ILogicalExpression interval) {
+        return getScalarExpr(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START, interval);
+    }
+
+    private ILogicalExpression getIntervalEndExpr(ILogicalExpression interval) {
+        return getScalarExpr(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END, interval);
+    }
+
+    private ILogicalExpression getScalarExpr(FunctionIdentifier func, ILogicalExpression interval) {
+        List<Mutable<ILogicalExpression>> intervalArg = new ArrayList<Mutable<ILogicalExpression>>();
+        intervalArg.add(new MutableObject<ILogicalExpression>(interval));
+        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(func), intervalArg);
+    }
+
+    private ILogicalExpression getScalarExpr(FunctionIdentifier func, ILogicalExpression interval1,
+            ILogicalExpression interval2) {
+        List<Mutable<ILogicalExpression>> intervalArg = new ArrayList<Mutable<ILogicalExpression>>();
+        intervalArg.add(new MutableObject<ILogicalExpression>(interval1));
+        intervalArg.add(new MutableObject<ILogicalExpression>(interval2));
+        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(func), intervalArg);
+    }
+
+}
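
For illustration only (not part of this diff): a sketch of the condition this rule produces for interval-meets(i1, i2), namely eq(interval-end(i1), interval-start(i2)). The helper class below is hypothetical; the function identifiers and expression classes are the ones the rule itself uses.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

import edu.uci.ics.asterix.aql.util.FunctionUtils;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

public class IntervalMeetsTranslationSketch {
    // interval-meets(i1, i2) holds iff i1 ends exactly where i2 starts,
    // so the rule rewrites it to eq(interval-end(i1), interval-start(i2)).
    public static ILogicalExpression translateMeets(ILogicalExpression i1, ILogicalExpression i2) {
        return call(AlgebricksBuiltinFunctions.EQ,
                call(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END, i1),
                call(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START, i2));
    }

    private static ILogicalExpression call(FunctionIdentifier fid, ILogicalExpression... args) {
        List<Mutable<ILogicalExpression>> argRefs = new ArrayList<Mutable<ILogicalExpression>>();
        for (ILogicalExpression arg : args) {
            argRefs.add(new MutableObject<ILogicalExpression>(arg));
        }
        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid), argRefs);
    }
}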

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
new file mode 100644
index 0000000..f6bf3c3
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
@@ -0,0 +1,548 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.typecast;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+/**
+ * This class is a utility for static type casting.
+ * It offers two public methods:
+ * 1. public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ * IVariableTypeEnvironment env) throws AlgebricksException, which only enforces the list type recursively.
+ * 2. public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ * IVariableTypeEnvironment env) throws AlgebricksException, which enforces the list type and the record type recursively.
+ *
+ * @author yingyib
+ */
+public class StaticTypeCastUtil {
+
+    /**
+     * This method is only called when funcExpr contains list constructor function calls.
+     * The List constructor is very special because a nested list is of type List<ANY>.
+     * However, the bottom-up type inference (InferTypeRule in Algebricks) does not infer that, so we need this method to enforce the type.
+     * We do not want to break the generality of Algebricks, so this method is called from an ASTERIX rule: {@link IntroduceEnforcedListTypeRule}.
+     *
+     * @param funcExpr
+     *            list constructor function expression
+     * @param reqType
+     *            required list type
+     * @param inputType
+     *            input list type
+     * @param env
+     *            type environment
+     * @throws AlgebricksException
+     */
+    public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else {
+            List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+            boolean changed = false;
+            for (Mutable<ILogicalExpression> arg : args) {
+                ILogicalExpression argExpr = arg.getValue();
+                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+                    IAType exprType = (IAType) env.getType(argFuncExpr);
+                    changed = rewriteListExpr(argFuncExpr, exprType, exprType, env) || changed;
+                }
+            }
+            return changed;
+        }
+    }
+
+    /**
+     * This method recursively enforces required types, for both the list type and the record type.
+     * The List constructor is very special because
+     * 1. a nested list in a list is of type List<ANY>;
+     * 2. a nested record in a list is of type Open_Record{}.
+     * The open record constructor is very special because
+     * 1. a nested list in the open part is of type List<ANY>;
+     * 2. a nested record in the open part is of type Open_Record{}.
+     * However, the bottom-up type inference (InferTypeRule in Algebricks) does not infer that, so we need this method to enforce the type.
+     * We do not want to break the generality of Algebricks, so this method is called from an ASTERIX rule: {@link IntroduceStaticTypeCastRule}.
+     * 
+     * @param funcExpr
+     *            the function expression whose type needs to be top-down enforced
+     * @param reqType
+     *            the required type inferred from parent operators/expressions
+     * @param inputType
+     *            the currently inferred (input) type
+     * @param env
+     *            the type environment
+     * @return true if the type was cast; false otherwise.
+     * @throws AlgebricksException
+     */
+    public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        /**
+         * Sanity check: if there are list (ordered or unordered) or record variable expressions in the funcExpr, we do not do STATIC type casting
+         * because they are not statically cast-able;
+         * instead, the record will be dynamically cast at runtime.
+         */
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else if (inputType.getTypeTag().equals(ATypeTag.RECORD)) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+            }
+            return rewriteRecordFuncExpr(funcExpr, (ARecordType) reqType, (ARecordType) inputType, env);
+        } else {
+            List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+            boolean changed = false;
+            for (Mutable<ILogicalExpression> arg : args) {
+                ILogicalExpression argExpr = arg.getValue();
+                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+                    IAType exprType = (IAType) env.getType(argFuncExpr);
+                    changed = changed || rewriteFuncExpr(argFuncExpr, exprType, exprType, env);
+                }
+            }
+            if (!compatible(reqType, inputType)) {
+                throw new AlgebricksException("type mismatch, required: " + reqType.toString() + " actual: "
+                        + inputType.toString());
+            }
+            return changed;
+        }
+    }
+
+    /**
+     * Only called when funcExpr is a record constructor.
+     * 
+     * @param funcExpr
+     *            record constructor function expression
+     * @param requiredRecordType
+     *            required record type
+     * @param inputRecordType
+     *            input record type
+     * @param env
+     *            type environment
+     * @throws AlgebricksException
+     */
+    private static boolean rewriteRecordFuncExpr(AbstractFunctionCallExpression funcExpr,
+            ARecordType requiredRecordType, ARecordType inputRecordType, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        // if already rewritten, the required type is not null
+        if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+            return false;
+        boolean casted = staticRecordTypeCast(funcExpr, requiredRecordType, inputRecordType, env);
+        if (casted) {
+            //enforce the required type if it is statically casted
+            TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredRecordType, inputRecordType);
+        }
+        return casted;
+    }
+
+    /**
+     * Only called when funcExpr is a list constructor.
+     * 
+     * @param funcExpr
+     *            list constructor function expression
+     * @param requiredListType
+     *            required list type
+     * @param inputListType
+     *            input list type
+     * @param env
+     *            type environment
+     * @throws AlgebricksException
+     */
+    private static boolean rewriteListFuncExpr(AbstractFunctionCallExpression funcExpr,
+            AbstractCollectionType requiredListType, AbstractCollectionType inputListType, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+            return false;
+
+        TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredListType, inputListType);
+        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+
+        IAType itemType = requiredListType.getItemType();
+        IAType inputItemType = inputListType.getItemType();
+        boolean changed = false;
+        for (int j = 0; j < args.size(); j++) {
+            ILogicalExpression arg = args.get(j).getValue();
+            IAType currentItemType = (inputItemType == null || inputItemType == BuiltinType.ANY) ? (IAType) env
+                    .getType(arg) : inputItemType;
+            switch (arg.getExpressionTag()) {
+                case FUNCTION_CALL:
+                    ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) arg;
+                    changed = rewriteFuncExpr(argFunc, itemType, currentItemType, env) || changed;
+                    break;
+                case VARIABLE:
+                    changed = injectCastToRelaxType(args.get(j), currentItemType, env) || changed;
+                    break;
+            }
+        }
+        return changed;
+    }
+
+    /**
+     * This method statically casts the type of records from their current type to the required type.
+     * 
+     * @param func
+     *            The record constructor expression.
+     * @param reqType
+     *            The required type.
+     * @param inputType
+     *            The current type.
+     * @param env
+     *            The type environment.
+     * @throws AlgebricksException
+     */
+    private static boolean staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
+            ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
+        if (!(func.getFunctionIdentifier() == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR || func
+                .getFunctionIdentifier() == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
+            return false;
+        }
+        IAType[] reqFieldTypes = reqType.getFieldTypes();
+        String[] reqFieldNames = reqType.getFieldNames();
+        IAType[] inputFieldTypes = inputType.getFieldTypes();
+        String[] inputFieldNames = inputType.getFieldNames();
+
+        int[] fieldPermutation = new int[reqFieldTypes.length];
+        boolean[] nullFields = new boolean[reqFieldTypes.length];
+        boolean[] openFields = new boolean[inputFieldTypes.length];
+
+        Arrays.fill(nullFields, false);
+        Arrays.fill(openFields, true);
+        Arrays.fill(fieldPermutation, -1);
+
+        // forward match: match from actual to required
+        boolean matched = false;
+        for (int i = 0; i < inputFieldNames.length; i++) {
+            String fieldName = inputFieldNames[i];
+            IAType fieldType = inputFieldTypes[i];
+
+            if (2 * i + 1 > func.getArguments().size()) {
+                // it is not a record constructor function
+                return false;
+            }
+
+            // 2*i+1 is the index of field value expression
+            ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
+            matched = false;
+            for (int j = 0; j < reqFieldNames.length; j++) {
+                String reqFieldName = reqFieldNames[j];
+                IAType reqFieldType = reqFieldTypes[j];
+                if (fieldName.equals(reqFieldName)) {
+                    //type matched
+                    if (fieldType.equals(reqFieldType)) {
+                        fieldPermutation[j] = i;
+                        openFields[i] = false;
+                        matched = true;
+
+                        if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                            ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+                            rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+                        }
+                        break;
+                    }
+
+                    // match the optional field
+                    if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
+                        IAType itemType = ((AUnionType) reqFieldType).getNullableType();
+                        reqFieldType = itemType;
+                        if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
+                            fieldPermutation[j] = i;
+                            openFields[i] = false;
+                            matched = true;
+
+                            // rewrite record expr
+                            if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                                ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+                                rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+                            }
+                            break;
+                        }
+                    }
+
+                    // match the optional type input for a non-optional field
+                    // delay that to runtime by calling the not-null function
+                    if (NonTaggedFormatUtil.isOptional(fieldType)) {
+                        IAType itemType = ((AUnionType) fieldType).getNullableType();
+                        if (reqFieldType.equals(itemType)) {
+                            fieldPermutation[j] = i;
+                            openFields[i] = false;
+                            matched = true;
+
+                            ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
+                                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
+                            notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
+                            // wrap the original value expression with the not-null function
+                            func.getArguments().get(2 * i + 1).setValue(notNullFunc);
+                            break;
+                        }
+                    }
+
+                    // the field matches by name but its type differs: rewrite the nested value expression so it can be cast
+                    if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                        ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+                        rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+                        fieldPermutation[j] = i;
+                        openFields[i] = false;
+                        matched = true;
+                        break;
+                    }
+                }
+            }
+            // the input has an extra field that the closed required type does not allow
+            if (!matched && !reqType.isOpen()) {
+                throw new AlgebricksException("static type mismatch: the input record includes an extra field "
+                        + fieldName + ":" + fieldType + " that is not allowed by the closed type."
+                        + " Please check the field name and type.");
+            }
+        }
+
+        // backward match: match from required to actual
+        for (int i = 0; i < reqFieldNames.length; i++) {
+            String reqFieldName = reqFieldNames[i];
+            IAType reqFieldType = reqFieldTypes[i];
+            matched = false;
+            for (int j = 0; j < inputFieldNames.length; j++) {
+                String fieldName = inputFieldNames[j];
+                IAType fieldType = inputFieldTypes[j];
+                if (!fieldName.equals(reqFieldName))
+                    continue;
+                // Check the open-field flag rather than fieldPermutation here:
+                // fieldPermutation has one entry per required schema field (indexed
+                // by required-field position), whereas what we need to know is
+                // whether this input field has already been matched.
+                if (!openFields[j]) {
+                    matched = true;
+                    break;
+                }
+
+                // the required field is optional (nullable)
+                if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
+                    IAType itemType = ((AUnionType) reqFieldType).getNullableType();
+                    if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
+                        matched = true;
+                        break;
+                    }
+                }
+            }
+            if (matched)
+                continue;
+
+            if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
+                // add a null field
+                nullFields[i] = true;
+            } else {
+                // no matched field in the input for a required closed field
+                if (inputType.isOpen()) {
+                    // if the input type is open, return false and let the dynamic type cast defer the error to runtime
+                    return false;
+                } else {
+                    throw new AlgebricksException(
+                            "static type mismatch: the input record is missing a required closed field "
+                                    + reqFieldName + ":" + reqFieldType + "! Please check the field name and type.");
+                }
+            }
+        }
+
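+        // Rebuild the constructor arguments: closed fields first, in the order required by the
+        // schema (filling missing optional fields with null), followed by the remaining open fields.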
+        List<Mutable<ILogicalExpression>> arguments = func.getArguments();
+        List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
+        originalArguments.addAll(arguments);
+        arguments.clear();
+        // re-order the closed part and fill in null fields
+        for (int i = 0; i < fieldPermutation.length; i++) {
+            int pos = fieldPermutation[i];
+            if (pos >= 0) {
+                arguments.add(originalArguments.get(2 * pos));
+                arguments.add(originalArguments.get(2 * pos + 1));
+            }
+            if (nullFields[i]) {
+                // add a null field
+                arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                        new AString(reqFieldNames[i])))));
+                arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                        ANull.NULL))));
+            }
+        }
+
+        // add the open part
+        for (int i = 0; i < openFields.length; i++) {
+            if (openFields[i]) {
+                arguments.add(originalArguments.get(2 * i));
+                Mutable<ILogicalExpression> expRef = originalArguments.get(2 * i + 1);
+                injectCastToRelaxType(expRef, inputFieldTypes[i], env);
+                arguments.add(expRef);
+            }
+        }
+        return true;
+    }
+
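+    /**
+     * Wraps the value expression of an open field with a dynamic cast to the corresponding
+     * default open type (open record or open list) when necessary, and recursively rewrites
+     * nested record/list constructors inside it.
+     *
+     * @return true if a cast function was injected; false otherwise.
+     */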
+    private static boolean injectCastToRelaxType(Mutable<ILogicalExpression> expRef, IAType inputFieldType,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        ILogicalExpression argExpr = expRef.getValue();
+        List<LogicalVariable> parameterVars = new ArrayList<LogicalVariable>();
+        argExpr.getUsedVariables(parameterVars);
+        // Open fields must be handled recursively using their default open types:
+        // for lists, the item type becomes ANY; for records, the field type becomes
+        // the fully open record type.
+        boolean castInjected = false;
+        if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL
+                || argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            IAType reqFieldType = inputFieldType;
+            FunctionIdentifier fi = null;
+            // do not enforce a nested required type when the expression uses no variables
+            switch (inputFieldType.getTypeTag()) {
+                case RECORD:
+                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+                    fi = AsterixBuiltinFunctions.CAST_RECORD;
+                    break;
+                case ORDEREDLIST:
+                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+                    fi = AsterixBuiltinFunctions.CAST_LIST;
+                    break;
+                case UNORDEREDLIST:
+                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+                    fi = AsterixBuiltinFunctions.CAST_LIST;
+                    break;
+                default:
+                    break;
+            }
+            if (fi != null && !inputFieldType.equals(reqFieldType) && parameterVars.size() > 0) {
+                //inject dynamic type casting
+                injectCastFunction(FunctionUtils.getFunctionInfo(fi), reqFieldType, inputFieldType, expRef, argExpr);
+                castInjected = true;
+            }
+            if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                //recursively rewrite function arguments
+                if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null
+                        && reqFieldType != null) {
+                    if (castInjected) {
+                        //rewrite the arg expression inside the dynamic cast
+                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+                        rewriteFuncExpr(argFunc, inputFieldType, inputFieldType, env);
+                    } else {
+                        //rewrite arg
+                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+                        rewriteFuncExpr(argFunc, reqFieldType, inputFieldType, env);
+                    }
+                }
+            }
+        }
+        return castInjected;
+    }
+
+    /**
+     * Inject a dynamic cast function wrapping an existing expression
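+     * After the call, exprRef points to the new cast expression, which wraps the original
+     * argExpr and is annotated with the required and input types.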
+     * 
+     * @param funcInfo
+     *            the cast function
+     * @param reqType
+     *            the required type
+     * @param inputType
+     *            the original type
+     * @param exprRef
+     *            the expression reference
+     * @param argExpr
+     *            the original expression
+     */
+    private static void injectCastFunction(IFunctionInfo funcInfo, IAType reqType, IAType inputType,
+            Mutable<ILogicalExpression> exprRef, ILogicalExpression argExpr) {
+        ScalarFunctionCallExpression cast = new ScalarFunctionCallExpression(funcInfo);
+        cast.getArguments().add(new MutableObject<ILogicalExpression>(argExpr));
+        exprRef.setValue(cast);
+        TypeComputerUtilities.setRequiredAndInputTypes(cast, reqType, inputType);
+    }
+
+    /**
+     * Determine if two types are compatible
+     * 
+     * @param reqType
+     *            the required type
+     * @param inputType
+     *            the input type
+     * @return true if the two types are compatible; false otherwise
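+     *         Illustrative examples: ANY is compatible with any type; two non-union types are
+     *         compatible only if they are equal; two union types are compatible only if they
+     *         contain exactly the same member types.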
+     */
+    public static boolean compatible(IAType reqType, IAType inputType) {
+        if (reqType.getTypeTag() == ATypeTag.ANY || inputType.getTypeTag() == ATypeTag.ANY) {
+            return true;
+        }
+        if (reqType.getTypeTag() != ATypeTag.UNION && inputType.getTypeTag() != ATypeTag.UNION) {
+            return reqType.equals(inputType);
+        }
+        Set<IAType> reqTypePossible = new HashSet<IAType>();
+        Set<IAType> inputTypePossible = new HashSet<IAType>();
+        if (reqType.getTypeTag() == ATypeTag.UNION) {
+            AUnionType unionType = (AUnionType) reqType;
+            reqTypePossible.addAll(unionType.getUnionList());
+        } else {
+            reqTypePossible.add(reqType);
+        }
+
+        if (inputType.getTypeTag() == ATypeTag.UNION) {
+            AUnionType unionType = (AUnionType) inputType;
+            inputTypePossible.addAll(unionType.getUnionList());
+        } else {
+            inputTypePossible.add(inputType);
+        }
+        return reqTypePossible.equals(inputTypePossible);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
new file mode 100644
index 0000000..6c82c11
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.util;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.MutableObject;
+import org.mortbay.util.SingletonList;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+
+public class EquivalenceClassUtils {
+
+    /**
+     * Adds equivalence classes for primary index accesses (an unnest-map for a primary
+     * index lookup or a data source scan through the primary index): one equivalence class
+     * between each primary key variable and the corresponding record field-access expression.
+     * 
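+     * For example, for an internal dataset with primary key [id], this adds the equivalence
+     * class: pk-variable == field-access-by-index(record-variable, position of "id" in the record type).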
+     * @param operator
+     *            the primary index access operator.
+     * @param indexSearchVars
+     *            the variables returned by the primary index access; the last variable
+     *            is the record variable.
+     * @param recordType
+     *            the record type of an index payload record.
+     * @param dataset
+     *            the accessed dataset.
+     * @param context
+     *            the optimization context.
+     * @throws AlgebricksException
+     */
+    @SuppressWarnings("unchecked")
+    public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator,
+            List<LogicalVariable> indexSearchVars, ARecordType recordType, Dataset dataset, IOptimizationContext context)
+            throws AlgebricksException {
+        if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
+            return;
+        }
+        InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
+        List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
+        Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
+        String[] fieldNames = recordType.getFieldNames();
+        for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
+            fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
+        }
+
+        LogicalVariable recordVar = indexSearchVars.get(indexSearchVars.size() - 1);
+        for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
+            String pkFieldName = primaryKey.get(pkIndex).get(0);
+            int fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
+            LogicalVariable var = indexSearchVars.get(pkIndex);
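+            // Record that this primary key variable is equivalent to
+            // field-access-by-index(recordVar, <position of the key field in the record type>).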
+            ILogicalExpression expr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
+                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)),
+                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(
+                            fieldIndexInRecord)))));
+            EquivalenceClass equivClass = new EquivalenceClass(SingletonList.newSingletonList(var), var,
+                    SingletonList.newSingletonList(expr));
+            Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
+            if (equivalenceMap == null) {
+                equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
+                context.putEquivalenceClassMap(operator, equivalenceMap);
+            }
+            equivalenceMap.put(var, equivClass);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java
new file mode 100644
index 0000000..1c31da0
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
+import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints;
+import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Base class for AQL translators. Contains the common validation logic for AQL
+ * statements.
+ */
+public abstract class AbstractAqlTranslator {
+
+    protected static final Logger LOGGER = Logger.getLogger(AbstractAqlTranslator.class.getName());
+
+    protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
+
+    public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+
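+        // First make sure the cluster is usable: wait (up to a bounded number of cycles) for the
+        // cluster to become ACTIVE and for global recovery to complete before validating the statement.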
+        if (!(AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE) && AsterixClusterProperties.INSTANCE
+                .isGlobalRecoveryCompleted())) {
+            int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
+            int waitCycleCount = 0;
+            try {
+                while (!AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE)
+                        && waitCycleCount < maxWaitCycles) {
+                    Thread.sleep(1000);
+                    waitCycleCount++;
+                }
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Thread interrupted while waiting for cluster to be "
+                            + ClusterState.ACTIVE);
+                }
+            }
+            if (!AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
+                throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE
+                        + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
+                }
+            }
+        }
+
+        if (AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
+            throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE + " state."
+                    + "\n One or more Node Controllers have left.\n");
+        }
+
+        if (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted()) {
+            int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
+            int waitCycleCount = 0;
+            try {
+                while (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
+                    Thread.sleep(1000);
+                    waitCycleCount++;
+                }
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
+                }
+            }
+            if (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted()) {
+                throw new AsterixException("Asterix Cluster global recovery is not yet complete and the system is in "
+                        + ClusterState.ACTIVE + " state");
+            }
+        }
+
+        boolean invalidOperation = false;
+        String message = null;
+        String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
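+        // Reject operations that would modify metadata-owned artifacts (the Metadata dataverse,
+        // its datasets, or the default nodegroup) and validate any dataset hints.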
+        switch (stmt.getKind()) {
+            case INSERT:
+                InsertStatement insertStmt = (InsertStatement) stmt;
+                if (insertStmt.getDataverseName() != null) {
+                    dataverse = insertStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Insert operation is not permitted in dataverse "
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+
+            case DELETE:
+                DeleteStatement deleteStmt = (DeleteStatement) stmt;
+                if (deleteStmt.getDataverseName() != null) {
+                    dataverse = deleteStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Delete operation is not permitted in dataverse "
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+
+            case NODEGROUP_DROP:
+                String nodegroupName = ((NodeGroupDropStatement) stmt).getNodeGroupName().getValue();
+                invalidOperation = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME.equals(nodegroupName);
+                if (invalidOperation) {
+                    message = "Cannot drop nodegroup:" + nodegroupName;
+                }
+                break;
+
+            case DATAVERSE_DROP:
+                DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName()
+                        .getValue());
+                if (invalidOperation) {
+                    message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
+                }
+                break;
+
+            case DATASET_DROP:
+                DropStatement dropStmt = (DropStatement) stmt;
+                if (dropStmt.getDataverseName() != null) {
+                    dataverse = dropStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Cannot drop a dataset belonging to the dataverse:"
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+            case DATASET_DECL:
+                DatasetDecl datasetStmt = (DatasetDecl) stmt;
+                Map<String, String> hints = datasetStmt.getHints();
+                if (hints != null && !hints.isEmpty()) {
+                    Pair<Boolean, String> validationResult = null;
+                    StringBuilder errorMsgBuffer = new StringBuilder();
+                    for (Entry<String, String> hint : hints.entrySet()) {
+                        validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
+                        if (!validationResult.first) {
+                            errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
+                                    + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
+                            errorMsgBuffer.append(" \n");
+                        }
+                    }
+                    invalidOperation = errorMsgBuffer.length() > 0;
+                    if (invalidOperation) {
+                        message = errorMsgBuffer.toString();
+                    }
+                }
+                break;
+
+        }
+
+        if (invalidOperation) {
+            throw new AsterixException("Invalid operation - " + message);
+        }
+    }
+}